var/home/core/zuul-output/logs/kubelet.log
Dec 02 10:01:46 crc systemd[1]: Starting Kubernetes Kubelet... Dec 02 10:01:46 crc restorecon[4581]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc 
restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 02 10:01:46 crc 
restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:46 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc 
restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc 
restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 
crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 
10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 
10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 02 10:01:47 crc 
restorecon[4581]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 
10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 
10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc 
restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:47 crc restorecon[4581]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 02 10:01:47 crc restorecon[4581]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 02 10:01:47 crc kubenswrapper[4685]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 10:01:47 crc kubenswrapper[4685]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 02 10:01:47 crc kubenswrapper[4685]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 10:01:47 crc kubenswrapper[4685]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 02 10:01:47 crc kubenswrapper[4685]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 02 10:01:47 crc kubenswrapper[4685]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.750031 4685 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753824 4685 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753847 4685 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753852 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753857 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753863 4685 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753869 4685 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753873 4685 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753878 4685 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753882 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753887 4685 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753892 4685 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753897 4685 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753903 4685 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753907 4685 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753913 4685 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753917 4685 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753922 4685 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753927 4685 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753932 4685 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753937 4685 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 
10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753942 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753947 4685 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753952 4685 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753957 4685 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753963 4685 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753967 4685 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753971 4685 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753975 4685 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753986 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753990 4685 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.753996 4685 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754001 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754005 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754009 4685 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754014 4685 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754019 4685 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754023 4685 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754028 4685 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754032 4685 feature_gate.go:330] unrecognized feature gate: Example Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754036 4685 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754040 4685 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754045 4685 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754049 4685 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754053 4685 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754057 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754061 4685 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754065 4685 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754084 4685 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754092 4685 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754098 4685 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754103 4685 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754108 4685 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754112 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754117 4685 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754122 4685 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754126 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754131 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754136 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754141 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754145 4685 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754148 4685 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754153 4685 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission 
Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754157 4685 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754162 4685 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754166 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754170 4685 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754175 4685 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754180 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754184 4685 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754188 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.754192 4685 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754514 4685 flags.go:64] FLAG: --address="0.0.0.0" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754526 4685 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754535 4685 flags.go:64] FLAG: --anonymous-auth="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754541 4685 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754547 4685 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754552 4685 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754573 4685 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754592 4685 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754597 4685 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754604 4685 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754609 4685 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754614 4685 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754620 4685 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754625 4685 flags.go:64] FLAG: --cgroup-root="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754629 4685 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754634 4685 flags.go:64] FLAG: --client-ca-file="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754640 4685 flags.go:64] FLAG: --cloud-config="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754645 4685 flags.go:64] FLAG: --cloud-provider="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754651 4685 flags.go:64] FLAG: --cluster-dns="[]" Dec 02 
10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754658 4685 flags.go:64] FLAG: --cluster-domain="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754663 4685 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754669 4685 flags.go:64] FLAG: --config-dir="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754673 4685 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754678 4685 flags.go:64] FLAG: --container-log-max-files="5" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754691 4685 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754698 4685 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754703 4685 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754709 4685 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754715 4685 flags.go:64] FLAG: --contention-profiling="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754719 4685 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754724 4685 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754730 4685 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754736 4685 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754743 4685 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754749 4685 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754754 4685 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754759 4685 flags.go:64] FLAG: --enable-load-reader="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754765 4685 flags.go:64] FLAG: --enable-server="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754770 4685 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754776 4685 flags.go:64] FLAG: --event-burst="100" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754782 4685 flags.go:64] FLAG: --event-qps="50" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754787 4685 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754792 4685 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754797 4685 flags.go:64] FLAG: --eviction-hard="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754803 4685 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754807 4685 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754812 4685 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754817 4685 flags.go:64] FLAG: --eviction-soft="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754821 4685 flags.go:64] FLAG: 
--eviction-soft-grace-period="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754826 4685 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754831 4685 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754836 4685 flags.go:64] FLAG: --experimental-mounter-path="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754840 4685 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754844 4685 flags.go:64] FLAG: --fail-swap-on="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754848 4685 flags.go:64] FLAG: --feature-gates="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754854 4685 flags.go:64] FLAG: --file-check-frequency="20s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754858 4685 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754862 4685 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754867 4685 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754871 4685 flags.go:64] FLAG: --healthz-port="10248" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754876 4685 flags.go:64] FLAG: --help="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754880 4685 flags.go:64] FLAG: --hostname-override="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754884 4685 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754891 4685 flags.go:64] FLAG: --http-check-frequency="20s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754895 4685 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754899 4685 flags.go:64] FLAG: --image-credential-provider-config="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754903 4685 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754907 4685 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754912 4685 flags.go:64] FLAG: --image-service-endpoint="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754916 4685 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754920 4685 flags.go:64] FLAG: --kube-api-burst="100" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754924 4685 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754928 4685 flags.go:64] FLAG: --kube-api-qps="50" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754934 4685 flags.go:64] FLAG: --kube-reserved="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754939 4685 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754942 4685 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754948 4685 flags.go:64] FLAG: --kubelet-cgroups="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754952 4685 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754956 4685 flags.go:64] FLAG: --lock-file="" 
Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754960 4685 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754964 4685 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754968 4685 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754975 4685 flags.go:64] FLAG: --log-json-split-stream="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754979 4685 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754984 4685 flags.go:64] FLAG: --log-text-split-stream="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754988 4685 flags.go:64] FLAG: --logging-format="text" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754992 4685 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.754996 4685 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755001 4685 flags.go:64] FLAG: --manifest-url="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755005 4685 flags.go:64] FLAG: --manifest-url-header="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755011 4685 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755015 4685 flags.go:64] FLAG: --max-open-files="1000000" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755021 4685 flags.go:64] FLAG: --max-pods="110" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755025 4685 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755031 4685 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755036 4685 flags.go:64] FLAG: --memory-manager-policy="None" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755040 4685 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755046 4685 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755051 4685 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755056 4685 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755068 4685 flags.go:64] FLAG: --node-status-max-images="50" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755072 4685 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755077 4685 flags.go:64] FLAG: --oom-score-adj="-999" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755082 4685 flags.go:64] FLAG: --pod-cidr="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755086 4685 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755094 4685 flags.go:64] FLAG: --pod-manifest-path="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755099 4685 flags.go:64] FLAG: --pod-max-pids="-1" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755103 4685 
flags.go:64] FLAG: --pods-per-core="0" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755107 4685 flags.go:64] FLAG: --port="10250" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755112 4685 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755116 4685 flags.go:64] FLAG: --provider-id="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755120 4685 flags.go:64] FLAG: --qos-reserved="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755125 4685 flags.go:64] FLAG: --read-only-port="10255" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755129 4685 flags.go:64] FLAG: --register-node="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755133 4685 flags.go:64] FLAG: --register-schedulable="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755137 4685 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755145 4685 flags.go:64] FLAG: --registry-burst="10" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755149 4685 flags.go:64] FLAG: --registry-qps="5" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755153 4685 flags.go:64] FLAG: --reserved-cpus="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755157 4685 flags.go:64] FLAG: --reserved-memory="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755163 4685 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755167 4685 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755171 4685 flags.go:64] FLAG: --rotate-certificates="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755176 4685 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755180 4685 flags.go:64] FLAG: --runonce="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755185 4685 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755189 4685 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755193 4685 flags.go:64] FLAG: --seccomp-default="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755197 4685 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755201 4685 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755206 4685 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755211 4685 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755215 4685 flags.go:64] FLAG: --storage-driver-password="root" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755219 4685 flags.go:64] FLAG: --storage-driver-secure="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755224 4685 flags.go:64] FLAG: --storage-driver-table="stats" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755227 4685 flags.go:64] FLAG: --storage-driver-user="root" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755232 4685 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755236 4685 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 
02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755240 4685 flags.go:64] FLAG: --system-cgroups="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755244 4685 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755251 4685 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755255 4685 flags.go:64] FLAG: --tls-cert-file="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755259 4685 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755264 4685 flags.go:64] FLAG: --tls-min-version="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755268 4685 flags.go:64] FLAG: --tls-private-key-file="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755272 4685 flags.go:64] FLAG: --topology-manager-policy="none" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755276 4685 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755280 4685 flags.go:64] FLAG: --topology-manager-scope="container" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755286 4685 flags.go:64] FLAG: --v="2" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755292 4685 flags.go:64] FLAG: --version="false" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755298 4685 flags.go:64] FLAG: --vmodule="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755304 4685 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755308 4685 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755421 4685 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755426 4685 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755431 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755435 4685 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755439 4685 feature_gate.go:330] unrecognized feature gate: Example Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755443 4685 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755448 4685 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755452 4685 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755456 4685 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755460 4685 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755464 4685 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755468 4685 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755471 4685 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755475 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755479 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755482 4685 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755486 4685 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755490 4685 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755494 4685 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755498 4685 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755501 4685 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755505 4685 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755509 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755512 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755516 4685 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755519 4685 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755523 4685 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755527 4685 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755530 4685 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755534 4685 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755538 4685 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755548 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755551 4685 feature_gate.go:330] unrecognized feature gate: 
SetEIPForNLBIngressController Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755573 4685 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755578 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755582 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755586 4685 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755590 4685 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755594 4685 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755599 4685 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755603 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755607 4685 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755611 4685 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755615 4685 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755620 4685 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755624 4685 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755628 4685 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755633 4685 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755636 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755640 4685 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755644 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755648 4685 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755651 4685 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755655 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755658 4685 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755661 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755665 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755668 4685 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 
10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755672 4685 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755677 4685 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755682 4685 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755686 4685 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755690 4685 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755695 4685 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755700 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755703 4685 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755707 4685 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755712 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755715 4685 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755719 4685 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.755723 4685 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.755738 4685 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.764898 4685 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.764945 4685 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765061 4685 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765073 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765081 4685 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765086 4685 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765092 4685 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765097 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765102 
4685 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765107 4685 feature_gate.go:330] unrecognized feature gate: Example Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765112 4685 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765116 4685 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765121 4685 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765125 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765130 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765135 4685 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765140 4685 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765145 4685 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765149 4685 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765154 4685 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765158 4685 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765163 4685 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765170 4685 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765180 4685 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765185 4685 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765191 4685 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765196 4685 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765203 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765208 4685 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765213 4685 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765219 4685 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765225 4685 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765231 4685 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765237 4685 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765242 4685 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765247 4685 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765255 4685 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765262 4685 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765267 4685 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765272 4685 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765277 4685 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765282 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765287 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765292 4685 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765297 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765302 4685 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765306 4685 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765311 4685 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765316 4685 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765320 4685 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765324 4685 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765330 4685 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765334 4685 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765339 4685 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765344 4685 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765348 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 
10:01:47.765353 4685 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765358 4685 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765363 4685 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765370 4685 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765376 4685 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765381 4685 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765386 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765391 4685 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765396 4685 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765402 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765407 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765412 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765417 4685 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765422 4685 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765427 4685 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765432 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765438 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.765448 4685 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765646 4685 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765660 4685 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765668 4685 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765673 4685 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765679 4685 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765684 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765690 4685 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765695 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765700 4685 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765712 4685 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765717 4685 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765722 4685 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765726 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765731 4685 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765736 4685 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765741 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765746 4685 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765751 4685 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765755 4685 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765763 4685 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765769 4685 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765775 4685 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765780 4685 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765785 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765790 4685 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765795 4685 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765800 4685 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765804 4685 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765809 4685 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765814 4685 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765820 4685 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765825 4685 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765830 4685 feature_gate.go:330] unrecognized feature gate: Example Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765835 4685 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765842 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765847 4685 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765851 4685 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765856 4685 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765861 4685 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765866 4685 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765870 4685 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765876 4685 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765881 4685 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765886 4685 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765891 4685 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765895 4685 feature_gate.go:330] 
unrecognized feature gate: MetricsCollectionProfiles Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765900 4685 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765905 4685 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765910 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765914 4685 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765919 4685 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765924 4685 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765929 4685 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765934 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765939 4685 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765945 4685 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765951 4685 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765957 4685 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765962 4685 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765967 4685 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765973 4685 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765977 4685 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765983 4685 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765988 4685 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765993 4685 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.765998 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.766003 4685 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.766007 4685 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.766012 4685 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.766017 4685 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.766023 4685 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 02 
10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.766032 4685 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.766541 4685 server.go:940] "Client rotation is on, will bootstrap in background" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.769445 4685 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.769548 4685 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.770217 4685 server.go:997] "Starting client certificate rotation" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.770237 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.771243 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-18 07:37:48.024693111 +0000 UTC Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.771418 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.776690 4685 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.778793 4685 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.782246 4685 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.150:6443: connect: connection refused" logger="UnhandledError" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.787001 4685 log.go:25] "Validated CRI v1 runtime API" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.807261 4685 log.go:25] "Validated CRI v1 image API" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.809007 4685 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.811157 4685 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-02-09-56-50-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.811189 4685 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 
fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.821355 4685 manager.go:217] Machine: {Timestamp:2025-12-02 10:01:47.82060308 +0000 UTC m=+0.192377254 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2799998 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:02c3f37c-e07c-4f04-a10b-4347c058c238 BootID:709be170-eacf-4169-b401-3651c2ad2713 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:e3:79:d8 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:e3:79:d8 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:e5:87:c1 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:7f:07:fb Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:5b:ab:1c Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:87:f3:1a Speed:-1 Mtu:1496} {Name:eth10 MacAddress:1a:a0:a4:6c:af:dd Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:62:bf:cf:51:88:a3 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified 
Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.821508 4685 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.821634 4685 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.822127 4685 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.822275 4685 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.822308 4685 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.822501 4685 topology_manager.go:138] "Creating topology manager with none policy" Dec 02 
10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.822512 4685 container_manager_linux.go:303] "Creating device plugin manager" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.822735 4685 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.822763 4685 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.823047 4685 state_mem.go:36] "Initialized new in-memory state store" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.823123 4685 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.823739 4685 kubelet.go:418] "Attempting to sync node with API server" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.823759 4685 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.823780 4685 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.823792 4685 kubelet.go:324] "Adding apiserver pod source" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.823803 4685 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.825762 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.150:6443: connect: connection refused Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.825843 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.150:6443: connect: connection refused" logger="UnhandledError" Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.825757 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.150:6443: connect: connection refused Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.825919 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.150:6443: connect: connection refused" logger="UnhandledError" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.826037 4685 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.826380 4685 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827086 4685 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827528 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827551 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827574 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827584 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827596 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827603 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827609 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827619 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827626 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827632 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827668 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827674 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.827855 4685 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.828185 4685 server.go:1280] "Started kubelet" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.828486 4685 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.150:6443: connect: connection refused Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.828720 4685 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.829257 4685 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 02 10:01:47 crc systemd[1]: Started Kubernetes Kubelet. 
Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.829872 4685 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.833527 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.833786 4685 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.834703 4685 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.834731 4685 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.835067 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 11:06:04.4134305 +0000 UTC Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.835496 4685 server.go:460] "Adding debug handlers to kubelet server" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.835547 4685 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.835524 4685 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.150:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d5dbfe8fd7607 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 10:01:47.828164103 +0000 UTC m=+0.199938257,LastTimestamp:2025-12-02 10:01:47.828164103 +0000 UTC m=+0.199938257,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.838242 4685 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.838817 4685 factory.go:55] Registering systemd factory Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.838844 4685 factory.go:221] Registration of the systemd container factory successfully Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.838854 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.150:6443: connect: connection refused" interval="200ms" Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.839140 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.150:6443: connect: connection refused Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.839368 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.150:6443: connect: connection refused" logger="UnhandledError" Dec 02 10:01:47 crc 
kubenswrapper[4685]: I1202 10:01:47.839605 4685 factory.go:153] Registering CRI-O factory Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.840644 4685 factory.go:221] Registration of the crio container factory successfully Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.840726 4685 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.840758 4685 factory.go:103] Registering Raw factory Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.840777 4685 manager.go:1196] Started watching for new ooms in manager Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.841412 4685 manager.go:319] Starting recovery of all containers Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.855641 4685 manager.go:324] Recovery completed Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.862480 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.866212 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.866248 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.866267 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868740 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868781 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868799 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868811 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868821 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868837 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" 
volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868850 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868867 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868882 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868899 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868910 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868923 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868938 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868957 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868970 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.868982 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869000 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869012 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869023 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869612 4685 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869652 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869667 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869686 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869698 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869710 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869725 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869738 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869758 4685 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869774 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869791 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869804 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869821 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869832 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869847 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869860 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869872 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869888 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869899 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869915 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869927 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869939 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869954 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869968 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869980 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.869997 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870008 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870022 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870033 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870043 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870055 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870064 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870081 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870093 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870108 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870122 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870134 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870146 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870155 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870167 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870177 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870191 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870203 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870216 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870232 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870243 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870258 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870270 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870283 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870301 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870314 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870325 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870340 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" 
volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870353 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870368 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870380 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870392 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870407 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870425 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870440 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870452 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870462 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870521 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870534 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870551 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870610 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870623 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870637 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870649 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870677 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870690 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870718 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870735 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870747 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870762 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870774 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870786 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870801 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870814 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870827 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870844 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870855 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870870 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870883 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870895 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870910 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870940 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870956 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870974 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.870991 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871004 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871021 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871039 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871052 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871068 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871080 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871097 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871112 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871126 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871137 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871149 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871165 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871176 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871193 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871205 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871216 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871230 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871253 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" 
volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871270 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871287 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.871299 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872824 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872855 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872871 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872885 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872897 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872909 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872922 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872934 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872947 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872959 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872972 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872984 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.872998 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873009 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873022 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873033 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873044 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873056 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873067 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873079 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873089 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873102 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873116 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873128 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873141 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873153 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873165 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873176 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873188 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873199 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873211 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873224 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873235 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873248 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873261 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873272 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873285 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873299 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873313 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873325 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873339 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873351 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873364 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873375 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873387 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873399 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873411 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873423 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873434 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873449 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873460 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873473 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873486 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873507 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873518 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873530 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873542 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873593 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873610 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873623 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873635 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873647 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873658 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873672 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873684 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873696 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873712 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873725 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873737 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873754 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873766 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873779 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873794 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873843 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873854 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873865 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873877 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873887 4685 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873900 4685 reconstruct.go:97] "Volume reconstruction finished" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.873929 4685 reconciler.go:26] "Reconciler: start to sync state" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.874672 4685 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.874702 4685 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.874730 4685 state_mem.go:36] "Initialized new in-memory state store" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.885370 4685 policy_none.go:49] "None policy: Start" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.891402 4685 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.891445 4685 state_mem.go:35] "Initializing new in-memory state store" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.896289 4685 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.898369 4685 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.898409 4685 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.898435 4685 kubelet.go:2335] "Starting kubelet main sync loop" Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.898505 4685 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 02 10:01:47 crc kubenswrapper[4685]: W1202 10:01:47.899141 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.150:6443: connect: connection refused Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.899187 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.150:6443: connect: connection refused" logger="UnhandledError" Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.940798 4685 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.951629 4685 manager.go:334] "Starting Device Plugin manager" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.951848 4685 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.951921 4685 server.go:79] "Starting device plugin registration server" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.953022 4685 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.953378 4685 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.953915 4685 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.954021 4685 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.954035 4685 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 02 10:01:47 crc kubenswrapper[4685]: E1202 10:01:47.962314 4685 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.999274 4685 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 02 10:01:47 crc kubenswrapper[4685]: I1202 10:01:47.999350 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.000233 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.000270 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.000281 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.000499 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.000685 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.000721 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.001425 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.001446 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.001455 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.001425 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.001576 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.001586 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.001674 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.001817 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.001846 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.002522 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.002540 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.002548 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.002646 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.002761 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.002791 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003013 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003025 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003032 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003161 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003167 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003237 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003525 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003542 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003914 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003960 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003983 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.003996 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.004019 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.004027 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.004044 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.004062 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.004076 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.004111 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.004128 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.005319 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.005359 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.005378 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: E1202 10:01:48.039415 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.150:6443: connect: connection refused" interval="400ms" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.054474 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.055903 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.055992 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.056057 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.056130 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 10:01:48 crc kubenswrapper[4685]: E1202 10:01:48.056578 4685 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.150:6443: connect: connection refused" node="crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075361 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075423 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075459 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075491 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075524 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075552 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075608 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075636 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075665 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075692 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.075910 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.076075 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.076235 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.076290 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.076328 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.177968 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178039 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178071 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178103 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178131 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178159 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178185 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178212 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178238 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178288 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178388 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178430 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178470 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178513 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.178551 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179188 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179302 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 
10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179295 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179332 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179384 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179394 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179435 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179445 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179463 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179483 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179501 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179527 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179517 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179624 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.179702 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.257233 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.258444 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.258474 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.258482 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.258503 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 10:01:48 crc kubenswrapper[4685]: E1202 10:01:48.258913 4685 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.150:6443: connect: connection refused" node="crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.338777 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.363813 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: W1202 10:01:48.366965 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-4a3171a1823eee70ecc0a519eb7b83a1f2403c74734335315751da81501271b3 WatchSource:0}: Error finding container 4a3171a1823eee70ecc0a519eb7b83a1f2403c74734335315751da81501271b3: Status 404 returned error can't find the container with id 4a3171a1823eee70ecc0a519eb7b83a1f2403c74734335315751da81501271b3 Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.371508 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: W1202 10:01:48.381805 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-1d30f59386c108e2945877b194694528e6bd2f3aef31609a8ac5caeeafc1b760 WatchSource:0}: Error finding container 1d30f59386c108e2945877b194694528e6bd2f3aef31609a8ac5caeeafc1b760: Status 404 returned error can't find the container with id 1d30f59386c108e2945877b194694528e6bd2f3aef31609a8ac5caeeafc1b760 Dec 02 10:01:48 crc kubenswrapper[4685]: W1202 10:01:48.382847 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-4729b8931c8f83d5aebfde18cdda5945a2661c4fe5fbf239bf35e1a77a07696c WatchSource:0}: Error finding container 4729b8931c8f83d5aebfde18cdda5945a2661c4fe5fbf239bf35e1a77a07696c: Status 404 returned error can't find the container with id 4729b8931c8f83d5aebfde18cdda5945a2661c4fe5fbf239bf35e1a77a07696c Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.390022 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.395505 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:48 crc kubenswrapper[4685]: W1202 10:01:48.415102 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-ce0b40a9e3bf6b5470c0d62f3cacb4359e500ddf633c8881a9907cb16ec69f59 WatchSource:0}: Error finding container ce0b40a9e3bf6b5470c0d62f3cacb4359e500ddf633c8881a9907cb16ec69f59: Status 404 returned error can't find the container with id ce0b40a9e3bf6b5470c0d62f3cacb4359e500ddf633c8881a9907cb16ec69f59 Dec 02 10:01:48 crc kubenswrapper[4685]: W1202 10:01:48.419939 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c7b104fdc42af103866950af428e8e072dc99615bf9b2a8fd5c55c4bfd139f2e WatchSource:0}: Error finding container c7b104fdc42af103866950af428e8e072dc99615bf9b2a8fd5c55c4bfd139f2e: Status 404 returned error can't find the container with id c7b104fdc42af103866950af428e8e072dc99615bf9b2a8fd5c55c4bfd139f2e Dec 02 10:01:48 crc kubenswrapper[4685]: E1202 10:01:48.440460 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.150:6443: connect: connection refused" interval="800ms" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.659645 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.660919 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.660975 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.660987 4685 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.661021 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 10:01:48 crc kubenswrapper[4685]: E1202 10:01:48.661876 4685 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.150:6443: connect: connection refused" node="crc" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.829342 4685 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.150:6443: connect: connection refused Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.835420 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 23:53:02.540173956 +0000 UTC Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.903866 4685 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="9f1412bce19ffb0b85c13e4050db7f1af9d9b59bc77df75f59afb1ed31985bb0" exitCode=0 Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.903974 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"9f1412bce19ffb0b85c13e4050db7f1af9d9b59bc77df75f59afb1ed31985bb0"} Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.904233 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4a3171a1823eee70ecc0a519eb7b83a1f2403c74734335315751da81501271b3"} Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.904351 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.905209 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.905245 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.905256 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.905597 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee"} Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.905633 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c7b104fdc42af103866950af428e8e072dc99615bf9b2a8fd5c55c4bfd139f2e"} Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.907219 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918" exitCode=0 Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 
10:01:48.907291 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918"} Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.907339 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ce0b40a9e3bf6b5470c0d62f3cacb4359e500ddf633c8881a9907cb16ec69f59"} Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.907429 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.908107 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.908135 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.908146 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.910588 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.911273 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.911295 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.911306 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.911742 4685 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="40ec097350c65916b4091d8bd1c7d8cf238d96baed6ff31d8691d7df101174e2" exitCode=0 Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.911810 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"40ec097350c65916b4091d8bd1c7d8cf238d96baed6ff31d8691d7df101174e2"} Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.911838 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4729b8931c8f83d5aebfde18cdda5945a2661c4fe5fbf239bf35e1a77a07696c"} Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.911921 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.912866 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.912919 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.912931 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.913546 4685 generic.go:334] 
"Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="75efce96cb5046d0161b110885f9e4dc50050a1e6b51355887d7b8b8f378358e" exitCode=0 Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.913591 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"75efce96cb5046d0161b110885f9e4dc50050a1e6b51355887d7b8b8f378358e"} Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.913608 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"1d30f59386c108e2945877b194694528e6bd2f3aef31609a8ac5caeeafc1b760"} Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.913662 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.914326 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.914363 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:48 crc kubenswrapper[4685]: I1202 10:01:48.914376 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:49 crc kubenswrapper[4685]: W1202 10:01:49.049676 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.150:6443: connect: connection refused Dec 02 10:01:49 crc kubenswrapper[4685]: E1202 10:01:49.049791 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.150:6443: connect: connection refused" logger="UnhandledError" Dec 02 10:01:49 crc kubenswrapper[4685]: W1202 10:01:49.208458 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.150:6443: connect: connection refused Dec 02 10:01:49 crc kubenswrapper[4685]: E1202 10:01:49.208578 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.150:6443: connect: connection refused" logger="UnhandledError" Dec 02 10:01:49 crc kubenswrapper[4685]: E1202 10:01:49.243664 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.150:6443: connect: connection refused" interval="1.6s" Dec 02 10:01:49 crc kubenswrapper[4685]: W1202 10:01:49.287256 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get 
"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.150:6443: connect: connection refused Dec 02 10:01:49 crc kubenswrapper[4685]: E1202 10:01:49.287330 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.150:6443: connect: connection refused" logger="UnhandledError" Dec 02 10:01:49 crc kubenswrapper[4685]: W1202 10:01:49.294691 4685 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.150:6443: connect: connection refused Dec 02 10:01:49 crc kubenswrapper[4685]: E1202 10:01:49.294751 4685 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.150:6443: connect: connection refused" logger="UnhandledError" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.464988 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.467107 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.467155 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.467167 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.467213 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 10:01:49 crc kubenswrapper[4685]: E1202 10:01:49.467697 4685 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.150:6443: connect: connection refused" node="crc" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.836512 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 06:59:45.150157621 +0000 UTC Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.836593 4685 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1076h57m55.313568115s for next certificate rotation Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.909878 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.920474 4685 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="15cf88fc6e3e5321971e275876c4526efcacf20dd11e55055ceb94d65d7c190a" exitCode=0 Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.920602 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"15cf88fc6e3e5321971e275876c4526efcacf20dd11e55055ceb94d65d7c190a"} Dec 02 10:01:49 crc kubenswrapper[4685]: 
I1202 10:01:49.920749 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.922334 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.922364 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.922373 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.928215 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"aab00e7e9790fbd4295688d53a144090cac33beeb7b0b88a4be9881b60e7dbfd"} Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.928319 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.929941 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.929961 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.929970 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.936206 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"10f433e8bee31cdd2cda738be337293c56edb150a034d3b97344d0c422cf8067"} Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.936269 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7e9bd0f336493d5c923738caf57048bc9806fc5d25dd4344196aa3aecb12c188"} Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.936289 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"81c3a5d87ab5e5d26d3df950765de900966a822cea144f0deb49ecc0c87d4082"} Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.936400 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.941950 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.941996 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.942010 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.950810 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cb74e741e137380acb84dede0700a00eaeff1a8f6520adc9fdba1fca13281469"} Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.950987 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ec426f93ffde853f25bc3547c28e18859220745e5e05da008e0f625d8a180264"} Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.951053 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"97766d83462a09075a97576b6da218ca977a5f040be32bd6155c128b87d7fb05"} Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.951190 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.952061 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.952224 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.952283 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.955971 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd"} Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.956018 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b"} Dec 02 10:01:49 crc kubenswrapper[4685]: I1202 10:01:49.956028 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1"} Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.501818 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.654669 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.961683 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212"} Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.961735 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536"} Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.961769 4685 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.963007 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.963057 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.963074 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.965721 4685 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="66167bf8e50eef5bfddca00af54b5601d2b7e336873e636a64f0f080290972c5" exitCode=0 Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.965821 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"66167bf8e50eef5bfddca00af54b5601d2b7e336873e636a64f0f080290972c5"} Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.965914 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.965962 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.966646 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.966989 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.967030 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.967043 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.967216 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.967246 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.967257 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.968096 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.968145 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:50 crc kubenswrapper[4685]: I1202 10:01:50.968162 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.068307 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.069631 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:51 
crc kubenswrapper[4685]: I1202 10:01:51.069675 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.069687 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.069713 4685 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.972061 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fd71f5388e32d4280a8eb97c70b73313acb0a952bcab7798d55bd2b38b8c24e1"} Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.972109 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"963856aff110f633bfeb49e277dc101bec99f6a8b151c16c83fd1b430c3836f2"} Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.972124 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"aa9be10a5a831a1077dcfda7151f5f723630db1f6c543635953dee34b43d1887"} Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.972128 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.972197 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.972234 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.972155 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.972136 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4026782ed0441be4cb0ca02d296e6dc881bbad731993a1a48a8683b476d067c9"} Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.972335 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ed0295213427bf2af34f611ef504fa5e3a6f72aa4168d1b22ce10328e5e5314c"} Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.974754 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.974793 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.974806 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.975445 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.975480 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.975513 4685 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.975685 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.975722 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:51 crc kubenswrapper[4685]: I1202 10:01:51.975741 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.125098 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.377946 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.974545 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.974613 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.974629 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.975670 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.975698 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.975710 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.975720 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.975725 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:52 crc kubenswrapper[4685]: I1202 10:01:52.975734 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.118669 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.530287 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.530431 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.531465 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.531496 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.531506 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 
10:01:53.536196 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.977080 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.977148 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.977955 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.978218 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.978594 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.979165 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.979215 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.979250 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.979222 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.979266 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.979383 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.980458 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.980524 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:53 crc kubenswrapper[4685]: I1202 10:01:53.980547 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:54 crc kubenswrapper[4685]: I1202 10:01:54.465189 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 02 10:01:54 crc kubenswrapper[4685]: I1202 10:01:54.979548 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:54 crc kubenswrapper[4685]: I1202 10:01:54.980149 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:54 crc kubenswrapper[4685]: I1202 10:01:54.980536 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:54 crc kubenswrapper[4685]: I1202 10:01:54.980583 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:54 crc kubenswrapper[4685]: I1202 10:01:54.980608 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 
02 10:01:54 crc kubenswrapper[4685]: I1202 10:01:54.981054 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:54 crc kubenswrapper[4685]: I1202 10:01:54.981181 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:54 crc kubenswrapper[4685]: I1202 10:01:54.981291 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:55 crc kubenswrapper[4685]: I1202 10:01:55.577488 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:01:55 crc kubenswrapper[4685]: I1202 10:01:55.577703 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:55 crc kubenswrapper[4685]: I1202 10:01:55.578959 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:55 crc kubenswrapper[4685]: I1202 10:01:55.578993 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:55 crc kubenswrapper[4685]: I1202 10:01:55.579005 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:57 crc kubenswrapper[4685]: E1202 10:01:57.962450 4685 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 02 10:01:58 crc kubenswrapper[4685]: I1202 10:01:58.438619 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:58 crc kubenswrapper[4685]: I1202 10:01:58.438853 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:58 crc kubenswrapper[4685]: I1202 10:01:58.440617 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:58 crc kubenswrapper[4685]: I1202 10:01:58.440692 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:58 crc kubenswrapper[4685]: I1202 10:01:58.440716 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:58 crc kubenswrapper[4685]: I1202 10:01:58.444525 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:01:58 crc kubenswrapper[4685]: I1202 10:01:58.989360 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:01:58 crc kubenswrapper[4685]: I1202 10:01:58.990228 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:01:58 crc kubenswrapper[4685]: I1202 10:01:58.990266 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:01:58 crc kubenswrapper[4685]: I1202 10:01:58.990275 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:01:59 crc kubenswrapper[4685]: I1202 10:01:59.829763 4685 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 02 10:01:59 crc kubenswrapper[4685]: E1202 10:01:59.911736 4685 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 02 10:02:00 crc kubenswrapper[4685]: I1202 10:02:00.505263 4685 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 02 10:02:00 crc kubenswrapper[4685]: I1202 10:02:00.505348 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 02 10:02:00 crc kubenswrapper[4685]: I1202 10:02:00.511916 4685 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 02 10:02:00 crc kubenswrapper[4685]: I1202 10:02:00.512005 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 02 10:02:01 crc kubenswrapper[4685]: I1202 10:02:01.439709 4685 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 10:02:01 crc kubenswrapper[4685]: I1202 10:02:01.439807 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.162708 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.162904 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.165284 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.165441 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.165468 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.183994 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.385622 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.385835 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.386206 4685 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.386264 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.387957 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.388021 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.388042 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.390834 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.905515 4685 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 10:02:02 crc kubenswrapper[4685]: I1202 10:02:02.905627 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.000043 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.000553 4685 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.000762 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 
10:02:03.000782 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.000789 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.000950 4685 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.000996 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.001844 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.001881 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.001899 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.971151 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 02 10:02:03 crc kubenswrapper[4685]: I1202 10:02:03.987388 4685 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 02 10:02:05 crc kubenswrapper[4685]: E1202 10:02:05.508694 4685 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.515314 4685 trace.go:236] Trace[938736649]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 10:01:52.058) (total time: 13457ms): Dec 02 10:02:05 crc kubenswrapper[4685]: Trace[938736649]: ---"Objects listed" error: 13457ms (10:02:05.515) Dec 02 10:02:05 crc kubenswrapper[4685]: Trace[938736649]: [13.45707263s] [13.45707263s] END Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.515721 4685 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.515551 4685 trace.go:236] Trace[447894836]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 10:01:52.423) (total time: 13091ms): Dec 02 10:02:05 crc kubenswrapper[4685]: Trace[447894836]: ---"Objects listed" error: 13091ms (10:02:05.515) Dec 02 10:02:05 crc kubenswrapper[4685]: Trace[447894836]: [13.0919937s] [13.0919937s] END Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.515923 4685 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.976121 4685 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 02 
10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.976663 4685 trace.go:236] Trace[2077045631]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 10:01:52.304) (total time: 13671ms): Dec 02 10:02:05 crc kubenswrapper[4685]: Trace[2077045631]: ---"Objects listed" error: 13671ms (10:02:05.976) Dec 02 10:02:05 crc kubenswrapper[4685]: Trace[2077045631]: [13.671539003s] [13.671539003s] END Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.976700 4685 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.976951 4685 trace.go:236] Trace[1440993775]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Dec-2025 10:01:51.662) (total time: 14314ms): Dec 02 10:02:05 crc kubenswrapper[4685]: Trace[1440993775]: ---"Objects listed" error: 14314ms (10:02:05.976) Dec 02 10:02:05 crc kubenswrapper[4685]: Trace[1440993775]: [14.314711716s] [14.314711716s] END Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.976977 4685 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.985767 4685 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.986064 4685 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.987209 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.987249 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.987260 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.987278 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:05 crc kubenswrapper[4685]: I1202 10:02:05.987289 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:05Z","lastTransitionTime":"2025-12-02T10:02:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: E1202 10:02:06.006199 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"709be170-eacf-4169-b401-3651c2ad2713\\\",\\\"systemUUID\\\":\\\"02c3f37c-e07c-4f04-a10b-4347c058c238\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.021030 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.021064 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.021072 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.021088 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.021098 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: E1202 10:02:06.037927 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"709be170-eacf-4169-b401-3651c2ad2713\\\",\\\"systemUUID\\\":\\\"02c3f37c-e07c-4f04-a10b-4347c058c238\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.043690 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.043734 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.043747 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.043763 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.043775 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: E1202 10:02:06.054060 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"709be170-eacf-4169-b401-3651c2ad2713\\\",\\\"systemUUID\\\":\\\"02c3f37c-e07c-4f04-a10b-4347c058c238\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.062264 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.062304 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.062315 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.062333 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.062344 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: E1202 10:02:06.075661 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"709be170-eacf-4169-b401-3651c2ad2713\\\",\\\"systemUUID\\\":\\\"02c3f37c-e07c-4f04-a10b-4347c058c238\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.082282 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.082328 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.082338 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.082356 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.082368 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: E1202 10:02:06.093600 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"709be170-eacf-4169-b401-3651c2ad2713\\\",\\\"systemUUID\\\":\\\"02c3f37c-e07c-4f04-a10b-4347c058c238\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:06 crc kubenswrapper[4685]: E1202 10:02:06.093759 4685 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.095279 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.095319 4685 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.095332 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.095353 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.095364 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.196886 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.196916 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.196924 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.196938 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.196947 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.299153 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.299204 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.299214 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.299228 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.299239 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.401748 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.401779 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.401787 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.401800 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.401810 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.504779 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.505050 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.505134 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.505223 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.505324 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.539389 4685 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:56298->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.540083 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:56298->192.168.126.11:17697: read: connection reset by peer" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.608382 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.608614 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.608623 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.608636 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.608644 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.643759 4685 csr.go:261] certificate signing request csr-rzt5s is approved, waiting to be issued Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.710484 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.710734 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.710823 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.710902 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.710978 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.813738 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.813796 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.813813 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.813840 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.813858 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.836340 4685 apiserver.go:52] "Watching apiserver" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.916458 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.916508 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.916520 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.916540 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:06 crc kubenswrapper[4685]: I1202 10:02:06.916552 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:06Z","lastTransitionTime":"2025-12-02T10:02:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.016684 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.018103 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.018424 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.018486 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.018550 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.018649 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:07Z","lastTransitionTime":"2025-12-02T10:02:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.019459 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212" exitCode=255 Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.019502 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212"} Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.121094 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.121126 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.121136 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.121148 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.121157 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:07Z","lastTransitionTime":"2025-12-02T10:02:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.223053 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.223097 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.223105 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.223119 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.223128 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:07Z","lastTransitionTime":"2025-12-02T10:02:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.325476 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.325511 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.325520 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.325533 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.325542 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:07Z","lastTransitionTime":"2025-12-02T10:02:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.427764 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.428294 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.428380 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.428466 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.428555 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:07Z","lastTransitionTime":"2025-12-02T10:02:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.530966 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.531001 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.531009 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.531023 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.531032 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:07Z","lastTransitionTime":"2025-12-02T10:02:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.633424 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.633479 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.633490 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.633506 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.633518 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:07Z","lastTransitionTime":"2025-12-02T10:02:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.703533 4685 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.703793 4685 scope.go:117] "RemoveContainer" containerID="baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.703919 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"] Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.704197 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.704352 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.704516 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:07 crc kubenswrapper[4685]: E1202 10:02:07.704656 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.704905 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 10:02:07 crc kubenswrapper[4685]: E1202 10:02:07.705016 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.705056 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.705281 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:07 crc kubenswrapper[4685]: E1202 10:02:07.706017 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.724901 4685 csr.go:257] certificate signing request csr-rzt5s is issued Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.738185 4685 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.739232 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.739395 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.739408 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.739455 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.739331 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.739345 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.739361 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890277 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890333 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890371 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890402 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890425 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890457 4685 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890486 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890506 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890529 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890553 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890591 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890609 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890629 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890648 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890666 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890691 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890722 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890745 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890772 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890793 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890817 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890847 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890866 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890888 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890912 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890929 4685 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890950 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.890974 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891001 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891021 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891049 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891081 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891113 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891136 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891167 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891191 4685 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891212 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891240 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891261 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891288 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891313 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891335 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891365 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891388 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891408 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: 
\"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891432 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891460 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891481 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891510 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891534 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891553 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891588 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891607 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891627 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891644 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891662 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891680 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891695 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891723 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891749 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891767 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891783 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891807 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891826 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891843 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod 
\"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891869 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891898 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891916 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891942 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891961 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891977 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.891995 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892013 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892031 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892046 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892064 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892088 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892110 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892130 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892149 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892199 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892219 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892240 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892259 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892275 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892293 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892311 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892328 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892348 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892368 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892384 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892405 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892424 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892445 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 10:02:07 crc 
kubenswrapper[4685]: I1202 10:02:07.892462 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892480 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892502 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892519 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892540 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892575 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892594 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892611 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892631 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892652 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" 
(UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892667 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892691 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892709 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892727 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892746 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892766 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892785 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892803 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892823 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892847 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: 
\"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892882 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892914 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892941 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892957 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.892975 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893018 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893041 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893058 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893078 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893104 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893125 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893145 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893165 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893185 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893202 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893222 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893241 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893258 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893278 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893304 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893329 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893355 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893412 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893431 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893458 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893484 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893508 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893528 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893547 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 
10:02:07.893601 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893623 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893642 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893663 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893682 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893707 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893738 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893759 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893780 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893799 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " 
Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893816 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893837 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893856 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893875 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893892 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893913 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.893992 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894017 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894037 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894056 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894073 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894092 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894115 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894141 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894162 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894183 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894201 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894223 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894244 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894264 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894283 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894304 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894324 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894341 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894362 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894390 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894421 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894450 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894480 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894498 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894516 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894539 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894576 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894594 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894614 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894641 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894662 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894679 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894699 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894720 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: 
\"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894739 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894759 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894816 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894866 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894891 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894912 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894933 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894956 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894976 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.894999 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.895022 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.895043 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.895073 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.895095 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.895115 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.895142 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.895816 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.896012 4685 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.898238 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.898772 4685 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 02 10:02:07 crc kubenswrapper[4685]: W1202 10:02:07.899053 4685 reflector.go:484] object-"openshift-network-operator"/"metrics-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-network-operator"/"metrics-tls": Unexpected watch close - watch lasted less than a second and no items received Dec 02 10:02:07 crc kubenswrapper[4685]: W1202 10:02:07.899252 4685 reflector.go:484] object-"openshift-network-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.899539 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.967318 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.967354 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.967460 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.968184 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.968393 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.968427 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.968869 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.968883 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.969182 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.969457 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.969514 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.969776 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.969865 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.969887 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.970097 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.970157 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.970161 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: E1202 10:02:07.898899 4685 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-console/events\": read tcp 38.102.83.150:53714->38.102.83.150:6443: use of closed network connection" event="&Event{ObjectMeta:{networking-console-plugin-85b44fc459-gdk6g.187d5dc489cd2807 openshift-network-console 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-network-console,Name:networking-console-plugin-85b44fc459-gdk6g,UID:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8,APIVersion:v1,ResourceVersion:25349,FieldPath:,},Reason:NetworkNotReady,Message:network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 10:02:07.705999367 +0000 UTC m=+20.077773531,LastTimestamp:2025-12-02 10:02:07.705999367 +0000 UTC m=+20.077773531,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.970197 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.970472 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.970574 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.970824 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.971013 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.971194 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.971573 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.971758 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.971991 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.972150 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.972222 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.972304 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.972573 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: W1202 10:02:07.972686 4685 reflector.go:484] object-"openshift-network-node-identity"/"network-node-identity-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-network-node-identity"/"network-node-identity-cert": Unexpected watch close - watch lasted less than a second and no items received Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.973855 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.974228 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: W1202 10:02:07.974277 4685 reflector.go:484] object-"openshift-network-node-identity"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.974626 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.974640 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.974861 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.975931 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.976099 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.976198 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.976226 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.976237 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.976252 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.976263 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:07Z","lastTransitionTime":"2025-12-02T10:02:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.976434 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.976719 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.976730 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.976875 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.977042 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.977178 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.977743 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.977830 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.977909 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.978083 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.977977 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.978213 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.978379 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.978404 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.978791 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.979265 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.979456 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.979524 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.979636 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.979745 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.979788 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.979930 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.979938 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.979987 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980006 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980164 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980230 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980255 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980329 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980339 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980518 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980631 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980682 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980735 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.980879 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981081 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981123 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981160 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981383 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981443 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981664 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981774 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981928 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981932 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981947 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.981982 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.982113 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.982291 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.982456 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.982763 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.982768 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.982975 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.983391 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.983654 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.983872 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.984043 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.984190 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.984336 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.984463 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.984669 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.984692 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.984752 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.984847 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.984917 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.985226 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.985306 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.985428 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.985683 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.985841 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.985879 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.986775 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.986994 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.987495 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.987581 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.988137 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.988209 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.988461 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.988664 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.988678 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). 
InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.988858 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.989287 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.989344 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.989568 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.989832 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.989974 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.989975 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.990027 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.990451 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.990469 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.990614 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.990791 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.990850 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.990860 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.991045 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.991137 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.991343 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.991550 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.991628 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.991788 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.991790 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.991956 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.992141 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.992235 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.992282 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.992532 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.992636 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.992863 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.992923 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.993057 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.993256 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.993509 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.993718 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.993901 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.993956 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.993920 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.994048 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.994205 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.994412 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.994537 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.994639 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.994869 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.994917 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.995146 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.995358 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.995400 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.995416 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.995758 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.995816 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:07 crc kubenswrapper[4685]: I1202 10:02:07.997872 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:07.998669 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:07.999064 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:07.999234 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:07.999425 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:07.999658 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:07.999829 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:07.999834 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.000537 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.000674 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.000690 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.001321 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.001403 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.001418 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: W1202 10:02:08.001410 4685 reflector.go:484] object-"openshift-network-operator"/"iptables-alerter-script": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"iptables-alerter-script": Unexpected watch close - watch lasted less than a second and no items received Dec 02 10:02:08 crc kubenswrapper[4685]: W1202 10:02:08.001572 4685 reflector.go:484] object-"openshift-network-node-identity"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 10:02:08 crc kubenswrapper[4685]: W1202 10:02:08.001698 4685 reflector.go:484] object-"openshift-network-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.001829 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.001840 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.002138 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.002164 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.002203 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.002432 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.002453 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: W1202 10:02:08.002606 4685 reflector.go:484] object-"openshift-network-node-identity"/"ovnkube-identity-cm": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"ovnkube-identity-cm": Unexpected watch close - watch lasted less than a second and no items received Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.002619 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.002886 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.002896 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.007259 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.007319 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.007380 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-02 10:02:08.507361229 +0000 UTC m=+20.879135453 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.010116 4685 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.010678 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.010769 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.010889 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:08.510866774 +0000 UTC m=+20.882640938 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.012181 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.012755 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.017682 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.019673 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:08.519650924 +0000 UTC m=+20.891425078 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.019924 4685 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.019939 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.019949 4685 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.019958 4685 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.022845 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.032297 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.035792 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.061915 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.062166 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.062187 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.062199 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.062260 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:08.562244433 +0000 UTC m=+20.934018587 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.067052 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.068370 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.068414 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.068430 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.068512 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:08.568485573 +0000 UTC m=+20.940259727 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.072834 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.074203 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.082330 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.083001 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.093419 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.111290 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.111349 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.111363 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.111380 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.111392 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:08Z","lastTransitionTime":"2025-12-02T10:02:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.120997 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.121205 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.121317 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.121483 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.121606 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.121684 4685 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.121760 4685 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.122169 4685 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.121922 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.122280 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.122432 4685 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node 
\"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.122572 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.122649 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.122744 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.122823 4685 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.122896 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.122967 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123038 4685 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123110 4685 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123191 4685 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123340 4685 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123424 4685 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123495 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123595 4685 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" 
DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123675 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.122998 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123759 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123883 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123897 4685 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123909 4685 reconciler_common.go:293] "Volume detached for volume 
\"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123921 4685 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123933 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123945 4685 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123957 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.123994 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124008 4685 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124020 4685 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124031 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124045 4685 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124055 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124066 4685 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124078 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124090 4685 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124103 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124114 4685 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124125 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124136 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124146 4685 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124157 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124169 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124179 4685 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124190 4685 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124202 4685 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124213 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124223 4685 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: 
I1202 10:02:08.124233 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124243 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124255 4685 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124266 4685 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124276 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124286 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124296 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124306 4685 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124316 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124325 4685 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124335 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124346 4685 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124356 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124367 4685 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124378 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124387 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124396 4685 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124407 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124417 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124426 4685 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124436 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124449 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124460 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124469 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124480 4685 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124492 4685 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124503 4685 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124515 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124538 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124549 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124578 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124590 4685 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124603 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124614 4685 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124625 4685 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124636 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124649 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124660 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124671 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124681 4685 reconciler_common.go:293] 
"Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124691 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124701 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124712 4685 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124721 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124732 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124743 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124754 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124766 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124777 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124788 4685 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124799 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124817 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124825 4685 reconciler_common.go:293] "Volume detached for 
volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124833 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124841 4685 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124848 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124856 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124864 4685 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124872 4685 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124907 4685 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124917 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124926 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124934 4685 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124942 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124950 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124959 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124970 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124981 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124991 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.124998 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125007 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125015 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125024 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125033 4685 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125042 4685 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125051 4685 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125059 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125067 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125075 4685 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" 
Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125083 4685 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125092 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125102 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125112 4685 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125119 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125128 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125135 4685 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125143 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125151 4685 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125159 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125167 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125175 4685 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125183 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 
02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125192 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125199 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125207 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125215 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125223 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125282 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125295 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125307 4685 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125318 4685 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125327 4685 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125335 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125343 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125353 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 
10:02:08.125360 4685 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125368 4685 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125377 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125385 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125393 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125403 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125414 4685 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125422 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125430 4685 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125438 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125446 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125454 4685 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125462 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc 
kubenswrapper[4685]: I1202 10:02:08.125470 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125477 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125486 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125494 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125502 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125511 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125519 4685 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125527 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125535 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125543 4685 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125551 4685 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125576 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125584 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc 
kubenswrapper[4685]: I1202 10:02:08.125591 4685 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125601 4685 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125608 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125615 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125623 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125631 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125640 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125648 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.125656 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.155375 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.165060 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6403fe5e-b48b-47ed-ad54-0c1a89a58899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T10:02:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1202 10:02:05.518666 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1202 10:02:05.518795 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 10:02:05.975858 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2318185725/tls.crt::/tmp/serving-cert-2318185725/tls.key\\\\\\\"\\\\nI1202 10:02:06.368342 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 10:02:06.435054 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 10:02:06.437670 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 10:02:06.437752 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 10:02:06.437760 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 10:02:06.448685 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 10:02:06.448718 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448725 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448730 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 10:02:06.448734 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 10:02:06.448738 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 10:02:06.448742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 10:02:06.448998 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 10:02:06.453542 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.179857 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.189853 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.202065 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.202369 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.213019 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.214035 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.214066 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.214076 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.214090 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.214100 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:08Z","lastTransitionTime":"2025-12-02T10:02:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.234114 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.263366 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.285072 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.311922 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.317058 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.318416 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.318445 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.318458 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.318474 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.318485 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:08Z","lastTransitionTime":"2025-12-02T10:02:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.341017 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.355836 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.358197 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.390782 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6403fe5e-b48b-47ed-ad54-0c1a89a58899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"20
25-12-02T10:02:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1202 10:02:05.518666 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1202 10:02:05.518795 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 10:02:05.975858 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2318185725/tls.crt::/tmp/serving-cert-2318185725/tls.key\\\\\\\"\\\\nI1202 10:02:06.368342 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 10:02:06.435054 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 10:02:06.437670 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 10:02:06.437752 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 10:02:06.437760 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 10:02:06.448685 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 10:02:06.448718 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448725 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448730 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 10:02:06.448734 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 10:02:06.448738 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 10:02:06.448742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 10:02:06.448998 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 10:02:06.453542 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.421739 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.421788 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.421800 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.421816 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.421827 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:08Z","lastTransitionTime":"2025-12-02T10:02:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.457266 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.467201 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.481984 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.512551 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6403fe5e-b48b-47ed-ad54-0c1a89a58899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T10:02:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1202 10:02:05.518666 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1202 10:02:05.518795 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 10:02:05.975858 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2318185725/tls.crt::/tmp/serving-cert-2318185725/tls.key\\\\\\\"\\\\nI1202 10:02:06.368342 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 10:02:06.435054 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 10:02:06.437670 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 10:02:06.437752 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 10:02:06.437760 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 10:02:06.448685 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 10:02:06.448718 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448725 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448730 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 10:02:06.448734 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 10:02:06.448738 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 10:02:06.448742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 10:02:06.448998 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 10:02:06.453542 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.523978 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.524007 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.524015 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.524029 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.524038 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:08Z","lastTransitionTime":"2025-12-02T10:02:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.529007 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.529088 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.529118 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.529186 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:09.529133659 +0000 UTC m=+21.900907813 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.529194 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.529260 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.529308 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:09.529297704 +0000 UTC m=+21.901071928 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.529377 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:09.529345835 +0000 UTC m=+21.901119989 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.531871 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.542183 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.551813 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-kdc2q"] Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.552050 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-7b6sn"] Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.552244 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-mttcn"] Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.552438 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.552973 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-qp5vt"] Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.553529 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.554006 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.554115 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-kdc2q" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.560003 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.560191 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.565365 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.565433 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.565928 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.565969 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.566445 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.566632 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.566951 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.567151 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.565726 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.567328 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.567508 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.582081 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.582394 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.582506 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.616370 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629410 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629448 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629460 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629478 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629491 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:08Z","lastTransitionTime":"2025-12-02T10:02:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629637 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-var-lib-kubelet\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629678 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629701 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-multus-conf-dir\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629721 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629742 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/022e694c-9367-4013-8ee9-65ff856e8eec-proxy-tls\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629762 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a09db319-6f1b-4944-8097-6df042ad0869-multus-daemon-config\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629792 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629836 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-run-k8s-cni-cncf-io\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.629924 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.629945 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.629958 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.630004 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:09.629988454 +0000 UTC m=+22.001762678 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.629955 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-multus-cni-dir\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630047 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-os-release\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630069 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a09db319-6f1b-4944-8097-6df042ad0869-cni-binary-copy\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630089 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630111 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/022e694c-9367-4013-8ee9-65ff856e8eec-rootfs\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630131 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lsw8\" (UniqueName: \"kubernetes.io/projected/022e694c-9367-4013-8ee9-65ff856e8eec-kube-api-access-2lsw8\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630152 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-var-lib-cni-multus\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630171 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx95c\" (UniqueName: \"kubernetes.io/projected/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-kube-api-access-dx95c\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630193 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/022e694c-9367-4013-8ee9-65ff856e8eec-mcd-auth-proxy-config\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630212 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-multus-socket-dir-parent\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630233 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq6tc\" (UniqueName: \"kubernetes.io/projected/6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0-kube-api-access-rq6tc\") pod \"node-resolver-kdc2q\" (UID: \"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\") " pod="openshift-dns/node-resolver-kdc2q" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630253 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-system-cni-dir\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630271 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-cnibin\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630292 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: 
\"kubernetes.io/host-path/6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0-hosts-file\") pod \"node-resolver-kdc2q\" (UID: \"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\") " pod="openshift-dns/node-resolver-kdc2q" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630316 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630339 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-system-cni-dir\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630359 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-run-netns\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630382 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm9w8\" (UniqueName: \"kubernetes.io/projected/a09db319-6f1b-4944-8097-6df042ad0869-kube-api-access-zm9w8\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630414 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-var-lib-cni-bin\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630443 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-hostroot\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630464 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-run-multus-certs\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630483 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-cnibin\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630502 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-etc-kubernetes\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.630521 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-os-release\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.630689 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.630704 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.630713 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.630741 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:09.630730535 +0000 UTC m=+22.002504689 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.644832 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.655334 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.666051 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6403fe5e-b48b-47ed-ad54-0c1a89a58899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T10:02:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1202 10:02:05.518666 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1202 10:02:05.518795 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 10:02:05.975858 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2318185725/tls.crt::/tmp/serving-cert-2318185725/tls.key\\\\\\\"\\\\nI1202 10:02:06.368342 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 10:02:06.435054 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 10:02:06.437670 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 10:02:06.437752 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 10:02:06.437760 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 10:02:06.448685 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 10:02:06.448718 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448725 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448730 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 10:02:06.448734 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 10:02:06.448738 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 10:02:06.448742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 10:02:06.448998 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 10:02:06.453542 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.679967 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.693778 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.702759 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"022e694c-9367-4013-8ee9-65ff856e8eec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7b6sn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.714028 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bfcfdf-828b-4c2f-a706-73f943dd7bfc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97766d83462a09075a97576b6da218ca977a5f040be32bd6155c128b87d7fb05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec426f93ffde853f25bc3547c28e18859220745e5e05da008e0f625d8a180264\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb74e741e137380acb84dede0700a00eaeff1a8f6520adc9fdba1fca13281469\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.728909 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-02 09:57:07 +0000 UTC, rotation deadline is 2026-09-16 03:39:14.870923267 +0000 UTC Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.729098 4685 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6905h37m6.141830645s for next certificate rotation Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.730838 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/022e694c-9367-4013-8ee9-65ff856e8eec-mcd-auth-proxy-config\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.730960 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-multus-socket-dir-parent\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731047 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq6tc\" (UniqueName: \"kubernetes.io/projected/6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0-kube-api-access-rq6tc\") pod \"node-resolver-kdc2q\" (UID: \"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\") " pod="openshift-dns/node-resolver-kdc2q" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731130 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-system-cni-dir\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731242 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-cnibin\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731327 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0-hosts-file\") pod \"node-resolver-kdc2q\" (UID: \"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\") " pod="openshift-dns/node-resolver-kdc2q" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731428 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-system-cni-dir\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731259 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-system-cni-dir\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731143 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731635 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/022e694c-9367-4013-8ee9-65ff856e8eec-mcd-auth-proxy-config\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731493 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-cnibin\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731693 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-system-cni-dir\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731207 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-multus-socket-dir-parent\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731721 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0-hosts-file\") pod \"node-resolver-kdc2q\" (UID: \"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\") " pod="openshift-dns/node-resolver-kdc2q" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731729 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-run-netns\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731656 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-run-netns\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.731994 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm9w8\" (UniqueName: \"kubernetes.io/projected/a09db319-6f1b-4944-8097-6df042ad0869-kube-api-access-zm9w8\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732098 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-var-lib-cni-bin\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732206 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-hostroot\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732301 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-run-multus-certs\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732390 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-cnibin\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732446 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-hostroot\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732479 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-run-multus-certs\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732516 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-cnibin\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732402 
4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-var-lib-cni-bin\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732775 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-etc-kubernetes\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732870 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-os-release\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732959 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-var-lib-kubelet\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733039 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733128 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-multus-conf-dir\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733224 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733318 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/022e694c-9367-4013-8ee9-65ff856e8eec-proxy-tls\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733401 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a09db319-6f1b-4944-8097-6df042ad0869-multus-daemon-config\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733509 4685 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-run-k8s-cni-cncf-io\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733620 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-multus-cni-dir\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733711 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-os-release\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733827 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a09db319-6f1b-4944-8097-6df042ad0869-cni-binary-copy\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733913 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734022 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/022e694c-9367-4013-8ee9-65ff856e8eec-rootfs\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734129 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lsw8\" (UniqueName: \"kubernetes.io/projected/022e694c-9367-4013-8ee9-65ff856e8eec-kube-api-access-2lsw8\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734230 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-var-lib-cni-multus\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734329 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx95c\" (UniqueName: \"kubernetes.io/projected/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-kube-api-access-dx95c\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734419 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-run-k8s-cni-cncf-io\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734361 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/022e694c-9367-4013-8ee9-65ff856e8eec-rootfs\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734392 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-multus-cni-dir\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734240 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733670 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-multus-conf-dir\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734794 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733663 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733059 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734855 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734867 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734883 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734901 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a09db319-6f1b-4944-8097-6df042ad0869-cni-binary-copy\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " 
pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734896 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:08Z","lastTransitionTime":"2025-12-02T10:02:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.734925 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a09db319-6f1b-4944-8097-6df042ad0869-multus-daemon-config\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732990 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-os-release\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.733041 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-var-lib-kubelet\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.732773 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-etc-kubernetes\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.735044 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-os-release\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.735045 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a09db319-6f1b-4944-8097-6df042ad0869-host-var-lib-cni-multus\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.745060 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/022e694c-9367-4013-8ee9-65ff856e8eec-proxy-tls\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.746244 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.773323 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq6tc\" (UniqueName: \"kubernetes.io/projected/6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0-kube-api-access-rq6tc\") pod \"node-resolver-kdc2q\" (UID: \"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\") " pod="openshift-dns/node-resolver-kdc2q" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.774003 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lsw8\" (UniqueName: \"kubernetes.io/projected/022e694c-9367-4013-8ee9-65ff856e8eec-kube-api-access-2lsw8\") pod \"machine-config-daemon-7b6sn\" (UID: \"022e694c-9367-4013-8ee9-65ff856e8eec\") " pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.776204 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm9w8\" (UniqueName: \"kubernetes.io/projected/a09db319-6f1b-4944-8097-6df042ad0869-kube-api-access-zm9w8\") pod \"multus-mttcn\" (UID: \"a09db319-6f1b-4944-8097-6df042ad0869\") " pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.780430 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx95c\" (UniqueName: 
\"kubernetes.io/projected/9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9-kube-api-access-dx95c\") pod \"multus-additional-cni-plugins-qp5vt\" (UID: \"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\") " pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.781890 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.793015 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.806989 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mttcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a09db319-6f1b-4944-8097-6df042ad0869\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm9w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mttcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.817863 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.819889 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.820450 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qp5vt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.833804 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.836274 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.836308 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.836316 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.836330 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.836340 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:08Z","lastTransitionTime":"2025-12-02T10:02:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.841730 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kdc2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq6tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kdc2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.874875 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-mttcn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.898690 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.899148 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.899321 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.899360 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:08 crc kubenswrapper[4685]: E1202 10:02:08.899452 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.912493 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.924070 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-kdc2q" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.939150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.939394 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.939489 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.939644 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.939732 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:08Z","lastTransitionTime":"2025-12-02T10:02:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.941352 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7bvm"] Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.947618 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.952350 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.952442 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.952669 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.952784 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.952922 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.955534 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.955642 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.974809 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: I1202 10:02:08.987809 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bfcfdf-828b-4c2f-a706-73f943dd7bfc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97766d83462a09075a97576b6da218ca977a5f040be32bd6155c128b87d7fb05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec426f93ffde853f25bc3547c28e18859220745e5e05da008e0f625d8a180264\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb74e741e137380acb84dede0700a00eaeff1a8f6520adc9fdba1fca13281469\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:08 crc kubenswrapper[4685]: W1202 10:02:08.987838 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9555ebe3_c169_45ad_97d0_3a7eb6b2b4b9.slice/crio-879ee7d45c0117b4c1b5c1d2f7a99257245f1bb12b9695e54fcd65ddf0f27dbe WatchSource:0}: Error finding container 879ee7d45c0117b4c1b5c1d2f7a99257245f1bb12b9695e54fcd65ddf0f27dbe: Status 404 returned error can't find the container with id 879ee7d45c0117b4c1b5c1d2f7a99257245f1bb12b9695e54fcd65ddf0f27dbe Dec 02 10:02:08 crc kubenswrapper[4685]: W1202 10:02:08.990078 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod022e694c_9367_4013_8ee9_65ff856e8eec.slice/crio-528a34a8a04eb892aff28d857b480f283be0af73b053243c1bace75a36e13ef9 WatchSource:0}: Error finding container 528a34a8a04eb892aff28d857b480f283be0af73b053243c1bace75a36e13ef9: Status 404 returned error can't find the container with id 528a34a8a04eb892aff28d857b480f283be0af73b053243c1bace75a36e13ef9 Dec 02 10:02:08 crc 
kubenswrapper[4685]: W1202 10:02:08.990756 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f3ce8f0_ee0d_40c7_bab2_842a08fe40c0.slice/crio-a872bd59b58fb3a9f0acc44f040783c309860df690682d15e7e0fe732015e160 WatchSource:0}: Error finding container a872bd59b58fb3a9f0acc44f040783c309860df690682d15e7e0fe732015e160: Status 404 returned error can't find the container with id a872bd59b58fb3a9f0acc44f040783c309860df690682d15e7e0fe732015e160 Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.019977 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037184 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037267 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-bin\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037344 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-slash\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037400 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovn-node-metrics-cert\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037421 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-kubelet\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037441 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-netns\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037483 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-node-log\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037505 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-script-lib\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037573 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-ovn\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037595 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-config\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037650 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037672 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxjtj\" (UniqueName: \"kubernetes.io/projected/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-kube-api-access-hxjtj\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037714 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-log-socket\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037735 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-netd\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037757 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-openvswitch\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037801 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-env-overrides\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037822 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-systemd-units\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037841 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-systemd\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037897 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-etc-openvswitch\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.037925 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-var-lib-openvswitch\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.042062 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.042082 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.042090 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.042102 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.042109 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:09Z","lastTransitionTime":"2025-12-02T10:02:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.049810 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.050264 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.072544 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.092959 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mttcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a09db319-6f1b-4944-8097-6df042ad0869\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm9w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mttcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.116740 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"2ed797a9da1b9e786a8b7e12fcfe041b2f51955e7943f22101eacbecc202a75f"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.116790 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a13433306d801246905c24a3b6a7766c683ff8f111adf0e8d2ed2571166c403d"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.120979 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qp5vt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.137100 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kdc2q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq6tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kdc2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138634 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-systemd-units\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138671 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-systemd\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138705 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-etc-openvswitch\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 
10:02:09.138727 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-var-lib-openvswitch\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138767 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-systemd-units\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138814 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138840 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-bin\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138851 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-etc-openvswitch\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138863 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-slash\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138885 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovn-node-metrics-cert\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138892 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-systemd\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138907 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-kubelet\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138925 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-var-lib-openvswitch\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138931 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-netns\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138930 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-slash\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138964 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-kubelet\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138955 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-node-log\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138995 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-node-log\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139003 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-script-lib\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139039 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-netns\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139054 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-ovn\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139109 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-config\") pod \"ovnkube-node-p7bvm\" (UID: 
\"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139126 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-log-socket\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139142 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139158 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxjtj\" (UniqueName: \"kubernetes.io/projected/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-kube-api-access-hxjtj\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139192 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-netd\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139212 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-openvswitch\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139242 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-env-overrides\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139836 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-bin\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139857 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-log-socket\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139923 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-ovn\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139921 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.138903 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-ovn-kubernetes\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139973 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-netd\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.139870 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-env-overrides\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.140015 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-openvswitch\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.140428 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-config\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.143862 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-script-lib\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.147955 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"e9753d1025a5cf2ae4137bd5e5ea96bdda07b78091fc7e7a16e7f11dfa252f97"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.148058 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.148074 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.148087 4685 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.148087 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovn-node-metrics-cert\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.148100 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.148175 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:09Z","lastTransitionTime":"2025-12-02T10:02:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.158964 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.170041 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxjtj\" (UniqueName: \"kubernetes.io/projected/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-kube-api-access-hxjtj\") pod \"ovnkube-node-p7bvm\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.180447 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"08e62151b43e6004b6b90c765b92a235026d1958804ee1e888a6d129101fbebd"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.180493 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"8d0da329d14e5230d6489dfdc1dc2580c3148e6439248adc98955e57083f567f"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.185125 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mttcn" event={"ID":"a09db319-6f1b-4944-8097-6df042ad0869","Type":"ContainerStarted","Data":"2f97464e162a9e58eb4a6a719581fcefffe57e4701284b679bcc736a38304851"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.191767 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.196866 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.210920 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"06371aca8f40edf6b9cfe4be2ac99fae9780a6e0a893952e0187b5b236ee1243"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.211275 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.211398 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"022e694c-9367-4013-8ee9-65ff856e8eec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7b6sn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.216182 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-kdc2q" event={"ID":"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0","Type":"ContainerStarted","Data":"a872bd59b58fb3a9f0acc44f040783c309860df690682d15e7e0fe732015e160"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.218648 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"528a34a8a04eb892aff28d857b480f283be0af73b053243c1bace75a36e13ef9"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.225812 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" event={"ID":"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9","Type":"ContainerStarted","Data":"879ee7d45c0117b4c1b5c1d2f7a99257245f1bb12b9695e54fcd65ddf0f27dbe"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.230367 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7bvm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.248277 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6403fe5e-b48b-47ed-ad54-0c1a89a58899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02
T10:02:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1202 10:02:05.518666 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1202 10:02:05.518795 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 10:02:05.975858 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2318185725/tls.crt::/tmp/serving-cert-2318185725/tls.key\\\\\\\"\\\\nI1202 10:02:06.368342 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 10:02:06.435054 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 10:02:06.437670 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 10:02:06.437752 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 10:02:06.437760 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 10:02:06.448685 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 10:02:06.448718 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448725 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448730 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 10:02:06.448734 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 10:02:06.448738 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 10:02:06.448742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 10:02:06.448998 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 10:02:06.453542 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.261844 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.261844 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.261901 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.261909 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.261924 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.262042 4685 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:09Z","lastTransitionTime":"2025-12-02T10:02:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.263816 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08e62151b43e6004b6b90c765b92a235026d1958804ee1e888a6d129101fbebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.264918 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.276111 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.292749 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.316861 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mttcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a09db319-6f1b-4944-8097-6df042ad0869\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm9w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mttcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.335119 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qp5vt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.345846 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kdc2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq6tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kdc2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.360464 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.365199 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.365228 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.365236 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.365249 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.365258 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:09Z","lastTransitionTime":"2025-12-02T10:02:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.375042 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.380086 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.395181 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"022e694c-9367-4013-8ee9-65ff856e8eec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7b6sn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.423098 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\
\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\
",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7bvm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.439517 4685 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.442060 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6403fe5e-b48b-47ed-ad54-0c1a89a58899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert
-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://06371aca8f40edf6b9cfe4be2ac99fae9780a6e0a893952e0187b5b236ee1243\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1202 10:02:05.518666 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1202 10:02:05.518795 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 10:02:05.975858 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2318185725/tls.crt::/tmp/serving-cert-2318185725/tls.key\\\\\\\"\\\\nI1202 10:02:06.368342 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 10:02:06.435054 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 10:02:06.437670 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 10:02:06.437752 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 10:02:06.437760 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 10:02:06.448685 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 10:02:06.448718 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448725 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448730 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 10:02:06.448734 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 10:02:06.448738 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 10:02:06.448742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 10:02:06.448998 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 10:02:06.453542 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:50Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.445389 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.453914 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.467184 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bfcfdf-828b-4c2f-a706-73f943dd7bfc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97766d83462a09075a97576b6da218ca977a5f040be32bd6155c128b87d7fb05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec426f93ffde853f25bc3547c28e18859220745e5e05da008e0f625d8a180264\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb74e741e137380acb84dede0700a00eaeff1a8f6520adc9fdba1fca13281469\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.467989 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.468031 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.468101 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.468127 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.468140 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:09Z","lastTransitionTime":"2025-12-02T10:02:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.543280 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.543384 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:11.543361383 +0000 UTC m=+23.915135537 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.543434 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.543504 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.543544 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.543608 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:11.54359749 +0000 UTC m=+23.915371644 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.543615 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.543654 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:11.543644001 +0000 UTC m=+23.915418155 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.570133 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.570165 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.570175 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.570188 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.570199 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:09Z","lastTransitionTime":"2025-12-02T10:02:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.644261 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.644307 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.644413 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.644428 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.644438 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.644479 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-02 10:02:11.644466876 +0000 UTC m=+24.016241030 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.644524 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.644531 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.644538 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.644577 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:11.644549898 +0000 UTC m=+24.016324042 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.671951 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.672213 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.672297 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.672366 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.672430 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:09Z","lastTransitionTime":"2025-12-02T10:02:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.774974 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.775239 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.775330 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.775416 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.775495 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:09Z","lastTransitionTime":"2025-12-02T10:02:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.877983 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.878321 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.878331 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.878346 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.878357 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:09Z","lastTransitionTime":"2025-12-02T10:02:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.899608 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:09 crc kubenswrapper[4685]: E1202 10:02:09.899922 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.903111 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.903757 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.904435 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.905091 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.905691 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.906194 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.906797 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.907365 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.908190 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.909760 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.910680 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.911600 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.912233 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.912923 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.913585 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.914084 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.916073 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.916451 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.916997 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.918007 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.918793 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.919348 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.920181 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.920961 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.921936 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.922499 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.923868 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.924370 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.925884 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.926422 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.926916 4685 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.927447 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.929837 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.930361 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.931327 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.933544 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.934223 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.935757 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.939663 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.940372 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.941506 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.942166 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.944847 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.945939 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.946955 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.947518 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.949416 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.950124 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.950982 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.951436 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.952320 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.952893 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.953417 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.954227 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.980654 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.980865 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.980947 4685 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.981063 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:09 crc kubenswrapper[4685]: I1202 10:02:09.981142 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:09Z","lastTransitionTime":"2025-12-02T10:02:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.083717 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.083764 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.083776 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.083795 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.083808 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:10Z","lastTransitionTime":"2025-12-02T10:02:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.185892 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.185926 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.185935 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.185951 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.185962 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:10Z","lastTransitionTime":"2025-12-02T10:02:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.228902 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mttcn" event={"ID":"a09db319-6f1b-4944-8097-6df042ad0869","Type":"ContainerStarted","Data":"852b092b8ee9f25cbb6757b72a83bae9e22c0d29403063ed8f427bee7488600b"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.230543 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-kdc2q" event={"ID":"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0","Type":"ContainerStarted","Data":"6efa0a21904c20ef6a9b1cec51fbd1de75478ffefe3ebbdb72e23e5435e4b801"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.232013 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"00df96ed31a4ff1f27f81d9b6285f99cd9e8b772b3075a47f4aab32fef975541"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.232043 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"08e04cf99fa3ee1491747d78ebb85fb820355da3a86817cc7bdd5250562fc29e"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.233827 4685 generic.go:334] "Generic (PLEG): container finished" podID="9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9" containerID="c54ed33d20f8dfb88d78e5b909c7434c806e11ef2c5cfa45ba2881c561d5314c" exitCode=0 Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.233888 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" event={"ID":"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9","Type":"ContainerDied","Data":"c54ed33d20f8dfb88d78e5b909c7434c806e11ef2c5cfa45ba2881c561d5314c"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.236197 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6981f2ae9ba522f439b46d7d007e74c76885e9c0096bb0dab5943622e60c31bd"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.237821 4685 generic.go:334] "Generic (PLEG): container finished" podID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerID="e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57" exitCode=0 Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.237905 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerDied","Data":"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.237932 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerStarted","Data":"53ea63d6f1b2731641a6920b622a764c9921dcf8e0f8549ea1518d90b2e39f71"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.259404 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.292303 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.292366 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.292381 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.292404 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.292413 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:10Z","lastTransitionTime":"2025-12-02T10:02:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.294808 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mttcn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a09db319-6f1b-4944-8097-6df042ad0869\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://852b092b8ee9f25cbb6757b72a83bae9e22c0d29403063ed8f427bee7488600b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm9w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mttcn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.315889 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qp5vt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.349675 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08e62151b43e6004b6b90c765b92a235026d1958804ee1e888a6d129101fbebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.364667 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.384411 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kdc2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq6tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kdc2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.396498 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.396533 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.396543 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.396556 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.396579 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:10Z","lastTransitionTime":"2025-12-02T10:02:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.406177 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6403fe5e-b48b-47ed-ad54-0c1a89a58899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://06371aca8f40edf6b9cfe4be2ac99fae9780a6e0a893952e0187b5b236ee1243\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1202 10:02:05.518666 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1202 10:02:05.518795 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 10:02:05.975858 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2318185725/tls.crt::/tmp/serving-cert-2318185725/tls.key\\\\\\\"\\\\nI1202 10:02:06.368342 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 10:02:06.435054 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 10:02:06.437670 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 10:02:06.437752 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 10:02:06.437760 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 10:02:06.448685 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 10:02:06.448718 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448725 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448730 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 10:02:06.448734 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 10:02:06.448738 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 10:02:06.448742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 10:02:06.448998 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 10:02:06.453542 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:50Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.420826 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.450968 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.470830 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"022e694c-9367-4013-8ee9-65ff856e8eec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7b6sn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.504946 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7bvm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: 
I1202 10:02:10.508240 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.508283 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.508294 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.508311 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.508323 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:10Z","lastTransitionTime":"2025-12-02T10:02:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.524997 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bfcfdf-828b-4c2f-a706-73f943dd7bfc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97766d83462a09075a97576b6da218ca977a5f040be32bd6155c128b87d7fb05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0
,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec426f93ffde853f25bc3547c28e18859220745e5e05da008e0f625d8a180264\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb74e741e137380acb84dede0700a00eaeff1a8f6520adc9fdba1fca13281469\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.547197 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.563363 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6403fe5e-b48b-47ed-ad54-0c1a89a58899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://06371aca8f40edf6b9cfe4be2ac99fae9780a6e0a893952e0187b5b236ee1243\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1202 10:02:05.518666 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1202 10:02:05.518795 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 10:02:05.975858 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2318185725/tls.crt::/tmp/serving-cert-2318185725/tls.key\\\\\\\"\\\\nI1202 10:02:06.368342 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 10:02:06.435054 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 10:02:06.437670 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 10:02:06.437752 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 10:02:06.437760 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 10:02:06.448685 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 10:02:06.448718 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448725 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448730 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 10:02:06.448734 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 10:02:06.448738 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 10:02:06.448742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 10:02:06.448998 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 10:02:06.453542 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:50Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.577036 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.598060 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.616700 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.616735 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.616742 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.616755 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.616764 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:10Z","lastTransitionTime":"2025-12-02T10:02:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.619913 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"022e694c-9367-4013-8ee9-65ff856e8eec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00df96ed31a4ff1f27f81d9b6285f99cd9e8b772b3075a47f4aab32fef975541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08e04cf99fa3ee1491747d78ebb85fb820355da3a86817cc7bdd5250562fc29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7b6sn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.638361 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7bvm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.656091 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bfcfdf-828b-4c2f-a706-73f943dd7bfc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97766d83462a09075a97576b6da218ca977a5f040be32bd6155c128b87d7fb05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec426f93ffde853f25bc3547c28e18859220745e5e05da008e0f625d8a180264\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb74e741e137380acb84dede0700a00eaeff1a8f6520adc9fdba1fca13281469\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.677647 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.694698 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08e62151b43e6004b6b90c765b92a235026d1958804ee1e888a6d129101fbebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.711122 4685 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.718487 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.718513 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.718521 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.718533 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.718542 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:10Z","lastTransitionTime":"2025-12-02T10:02:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.728316 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6981f2ae9ba522f439b46d7d007e74c76885e9c0096bb0dab5943622e60c31bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed797a9da1b9e786a8b7e12fcfe041b2f51955e7943f22101eacbecc202a75f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.742850 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mttcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a09db319-6f1b-4944-8097-6df042ad0869\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://852b092b8ee9f25cbb6757b72a83bae9e22c0d29403063ed8f427bee7488600b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm9w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mttcn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.755734 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c54ed33d20f8dfb88d78e5b909c7434c806e11ef2c5cfa45ba2881c561d5314c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c54ed33d20f8dfb88d78e5b909c7434c806e11ef2c5cfa45ba2881c561d5314c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qp5vt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.781683 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kdc2q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6efa0a21904c20ef6a9b1cec51fbd1de75478ffefe3ebbdb72e23e5435e4b801\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq6tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kdc2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.821067 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.821104 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.821117 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.821134 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.821145 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:10Z","lastTransitionTime":"2025-12-02T10:02:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.872816 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-9rrkd"] Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.873205 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.876346 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.877061 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.877085 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.877883 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.892444 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-9rrkd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tp9p9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:10Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-9rrkd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.899202 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.899200 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:10 crc kubenswrapper[4685]: E1202 10:02:10.899329 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 10:02:10 crc kubenswrapper[4685]: E1202 10:02:10.899369 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.908591 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bfcfdf-828b-4c2f-a706-73f943dd7bfc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97766d83462a09075a97576b6da218ca977a5f040be32bd6155c128b87d7fb05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec426f93ffde853f25bc3547c28e18859220745e5e05da008e0f625d8a180264\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb74e741e137380acb84dede0700a00eaeff1a8f6520adc9fdba1fca13281469\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.922735 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.922770 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.922782 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.922796 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.922806 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:10Z","lastTransitionTime":"2025-12-02T10:02:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.931211 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.956690 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08e62151b43e6004b6b90c765b92a235026d1958804ee1e888a6d129101fbebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.958974 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c77c98af-6f5d-416a-9ed1-09b2dd7dfeca-serviceca\") pod \"node-ca-9rrkd\" (UID: \"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca\") " pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.959013 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp9p9\" (UniqueName: \"kubernetes.io/projected/c77c98af-6f5d-416a-9ed1-09b2dd7dfeca-kube-api-access-tp9p9\") pod \"node-ca-9rrkd\" (UID: \"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca\") " pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.959039 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c77c98af-6f5d-416a-9ed1-09b2dd7dfeca-host\") pod \"node-ca-9rrkd\" (UID: \"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca\") " pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.973421 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:10 crc kubenswrapper[4685]: I1202 10:02:10.987347 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6981f2ae9ba522f439b46d7d007e74c76885e9c0096bb0dab5943622e60c31bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed797a9da1b9e786a8b7e12fcfe041b2f51955e7943f22101eacbecc202a75f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:10Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.001295 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-mttcn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a09db319-6f1b-4944-8097-6df042ad0869\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://852b092b8ee9f25cbb6757b72a83bae9e22c0d29403063ed8f427bee7488600b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zm9w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-mttcn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.014770 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c54ed33d20f8dfb88d78e5b909c7434c806e11ef2c5cfa45ba2881c561d5314c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c54ed33d20f8dfb88d78e5b909c7434c806e11ef2c5cfa45ba2881c561d5314c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dx95c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qp5vt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.025160 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.025187 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.025197 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.025213 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.025224 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:11Z","lastTransitionTime":"2025-12-02T10:02:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.030390 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-kdc2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f3ce8f0-ee0d-40c7-bab2-842a08fe40c0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6efa0a21904c20ef6a9b1cec51fbd1de75478ffefe3ebbdb72e23e5435e4b801\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rq6tc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-kdc2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.046035 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.059891 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c77c98af-6f5d-416a-9ed1-09b2dd7dfeca-serviceca\") pod \"node-ca-9rrkd\" (UID: \"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca\") " pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.060115 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp9p9\" (UniqueName: \"kubernetes.io/projected/c77c98af-6f5d-416a-9ed1-09b2dd7dfeca-kube-api-access-tp9p9\") pod \"node-ca-9rrkd\" (UID: \"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca\") " pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.060222 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c77c98af-6f5d-416a-9ed1-09b2dd7dfeca-host\") pod \"node-ca-9rrkd\" (UID: \"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca\") " pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.060389 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c77c98af-6f5d-416a-9ed1-09b2dd7dfeca-host\") pod \"node-ca-9rrkd\" (UID: \"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca\") " pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.061951 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c77c98af-6f5d-416a-9ed1-09b2dd7dfeca-serviceca\") pod \"node-ca-9rrkd\" (UID: \"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca\") " pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.066913 4685 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"022e694c-9367-4013-8ee9-65ff856e8eec\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00df96ed31a4ff1f27f81d9b6285f99cd9e8b772b3075a47f4aab32fef975541\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08e04cf99fa3ee1491747d78ebb85fb820355da3a86817cc7bdd5250562fc29e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2lsw8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7b6sn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.084151 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp9p9\" (UniqueName: \"kubernetes.io/projected/c77c98af-6f5d-416a-9ed1-09b2dd7dfeca-kube-api-access-tp9p9\") pod \"node-ca-9rrkd\" (UID: \"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca\") " pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.087894 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hxjtj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:02:08Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p7bvm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z 
is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.108641 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6403fe5e-b48b-47ed-ad54-0c1a89a58899\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://06371aca8f40edf6b9cfe4be2ac99fae9780a6e0a893952e0187b5b236ee1243\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-02T10:02:06Z\\\",\\\"message\\\":\\\"le observer\\\\nW1202 10:02:05.518666 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1202 10:02:05.518795 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1202 10:02:05.975858 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2318185725/tls.crt::/tmp/serving-cert-2318185725/tls.key\\\\\\\"\\\\nI1202 10:02:06.368342 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1202 10:02:06.435054 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1202 10:02:06.437670 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1202 10:02:06.437752 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1202 10:02:06.437760 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1202 10:02:06.448685 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1202 10:02:06.448718 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448725 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1202 10:02:06.448730 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1202 10:02:06.448734 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1202 10:02:06.448738 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1202 10:02:06.448742 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1202 10:02:06.448998 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1202 10:02:06.453542 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:50Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.123064 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.128138 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.128163 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.128171 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.128182 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.128191 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:11Z","lastTransitionTime":"2025-12-02T10:02:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.186011 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-9rrkd" Dec 02 10:02:11 crc kubenswrapper[4685]: W1202 10:02:11.209780 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc77c98af_6f5d_416a_9ed1_09b2dd7dfeca.slice/crio-13fba3d37a70699b23dfdde0740e8823eca67863f0381769089496b9b839a303 WatchSource:0}: Error finding container 13fba3d37a70699b23dfdde0740e8823eca67863f0381769089496b9b839a303: Status 404 returned error can't find the container with id 13fba3d37a70699b23dfdde0740e8823eca67863f0381769089496b9b839a303 Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.229986 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.230023 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.230033 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.230050 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.230062 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:11Z","lastTransitionTime":"2025-12-02T10:02:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.242507 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-9rrkd" event={"ID":"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca","Type":"ContainerStarted","Data":"13fba3d37a70699b23dfdde0740e8823eca67863f0381769089496b9b839a303"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.244060 4685 generic.go:334] "Generic (PLEG): container finished" podID="9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9" containerID="c53ff4ff8a86bb49d0e2cfb98b19c95f66333466f3222047d670531a2c1bd1d9" exitCode=0 Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.244105 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" event={"ID":"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9","Type":"ContainerDied","Data":"c53ff4ff8a86bb49d0e2cfb98b19c95f66333466f3222047d670531a2c1bd1d9"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.254975 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerStarted","Data":"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.255031 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerStarted","Data":"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.255042 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" 
event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerStarted","Data":"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.255051 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerStarted","Data":"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.255060 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerStarted","Data":"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.255069 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerStarted","Data":"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.256703 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"fbf26891f3598bc5921d8018c4cc20c03553d75b343c442e56cfd21d9e28b5c5"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.261801 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.317638 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02bfcfdf-828b-4c2f-a706-73f943dd7bfc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-02T10:01:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97766d83462a09075a97576b6da218ca977a5f040be32bd6155c128b87d7fb05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-02T10:01:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec426f93ffde853f25bc3547c28e18859220745e5e05da008e0f625d8a180264\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb74e741e137380acb84dede0700a00eaeff1a8f6520adc9fdba1fca13281469\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:01:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-02T10:01:48Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.333340 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.333376 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.333388 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.333410 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.333432 4685 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:11Z","lastTransitionTime":"2025-12-02T10:02:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.335736 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:09Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08e62151b43e6004b6b90c765b92a235026d1958804ee1e888a6d129101fbebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.350082 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:07Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.359501 4685 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-02T10:02:10Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6981f2ae9ba522f439b46d7d007e74c76885e9c0096bb0dab5943622e60c31bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ed797a9da1b9e786a8b7e12fcfe041b2f51955e7943f22101eacbecc202a75f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-02T10:02:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-02T10:02:11Z is after 2025-08-24T17:21:41Z" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.408468 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-mttcn" podStartSLOduration=4.408452004 podStartE2EDuration="4.408452004s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:11.387917736 +0000 UTC m=+23.759691900" 
watchObservedRunningTime="2025-12-02 10:02:11.408452004 +0000 UTC m=+23.780226158" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.436292 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-kdc2q" podStartSLOduration=4.436271941 podStartE2EDuration="4.436271941s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:11.421321125 +0000 UTC m=+23.793095299" watchObservedRunningTime="2025-12-02 10:02:11.436271941 +0000 UTC m=+23.808046095" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.440249 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.440290 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.440303 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.440319 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.440330 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:11Z","lastTransitionTime":"2025-12-02T10:02:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.498754 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podStartSLOduration=4.498737761 podStartE2EDuration="4.498737761s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:11.47112149 +0000 UTC m=+23.842895644" watchObservedRunningTime="2025-12-02 10:02:11.498737761 +0000 UTC m=+23.870511915" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.521060 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=4.521042758 podStartE2EDuration="4.521042758s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:11.520054282 +0000 UTC m=+23.891828446" watchObservedRunningTime="2025-12-02 10:02:11.521042758 +0000 UTC m=+23.892816922" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.543166 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.543202 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.543210 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.543222 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.543232 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:11Z","lastTransitionTime":"2025-12-02T10:02:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.565086 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.565227 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.565271 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.565342 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.565397 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:15.565383045 +0000 UTC m=+27.937157199 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.565676 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:15.565666913 +0000 UTC m=+27.937441057 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.565738 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.565760 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:15.565754815 +0000 UTC m=+27.937528969 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.590517 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=3.590488439 podStartE2EDuration="3.590488439s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:11.555370723 +0000 UTC m=+23.927144877" watchObservedRunningTime="2025-12-02 10:02:11.590488439 +0000 UTC m=+23.962262593" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.645307 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.645342 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.645351 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.645364 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.645373 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:11Z","lastTransitionTime":"2025-12-02T10:02:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.665883 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.665931 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.666043 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.666063 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.666075 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.666115 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:15.666102236 +0000 UTC m=+28.037876380 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.666163 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.666172 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.666179 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.666201 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:15.666193529 +0000 UTC m=+28.037967673 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.700655 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7"] Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.701095 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.723922 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.742571 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-rs84c"] Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.743033 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.743101 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rs84c" podUID="25987b61-91e8-4fbc-b90f-3a00533ef0b5" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.744001 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.747161 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.747231 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.747240 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.747252 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.747260 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:11Z","lastTransitionTime":"2025-12-02T10:02:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.767892 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-env-overrides\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.767957 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.767991 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.768036 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.768060 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zsg2\" (UniqueName: \"kubernetes.io/projected/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-kube-api-access-9zsg2\") pod 
\"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.768082 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jhfw\" (UniqueName: \"kubernetes.io/projected/25987b61-91e8-4fbc-b90f-3a00533ef0b5-kube-api-access-7jhfw\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.849588 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.849901 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.849913 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.849933 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.849944 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:11Z","lastTransitionTime":"2025-12-02T10:02:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.869384 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.869438 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.869483 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.869510 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zsg2\" (UniqueName: \"kubernetes.io/projected/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-kube-api-access-9zsg2\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.869532 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jhfw\" (UniqueName: \"kubernetes.io/projected/25987b61-91e8-4fbc-b90f-3a00533ef0b5-kube-api-access-7jhfw\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.869553 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-env-overrides\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.869841 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.869915 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs podName:25987b61-91e8-4fbc-b90f-3a00533ef0b5 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:12.369897253 +0000 UTC m=+24.741671487 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs") pod "network-metrics-daemon-rs84c" (UID: "25987b61-91e8-4fbc-b90f-3a00533ef0b5") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.870476 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-env-overrides\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.870645 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.877254 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.891515 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zsg2\" (UniqueName: \"kubernetes.io/projected/a4b6e5c9-624d-4c30-88d5-763b26bf2cd7-kube-api-access-9zsg2\") pod \"ovnkube-control-plane-749d76644c-bn7k7\" (UID: \"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.896668 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jhfw\" (UniqueName: 
\"kubernetes.io/projected/25987b61-91e8-4fbc-b90f-3a00533ef0b5-kube-api-access-7jhfw\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.900711 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:11 crc kubenswrapper[4685]: E1202 10:02:11.900836 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.952090 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.952128 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.952137 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.952150 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:11 crc kubenswrapper[4685]: I1202 10:02:11.952160 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:11Z","lastTransitionTime":"2025-12-02T10:02:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.013979 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.054882 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.054908 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.054918 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.054932 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.054942 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:12Z","lastTransitionTime":"2025-12-02T10:02:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.157421 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.157486 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.157497 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.157511 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.157521 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:12Z","lastTransitionTime":"2025-12-02T10:02:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.264229 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.264279 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.264290 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.264308 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.264319 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:12Z","lastTransitionTime":"2025-12-02T10:02:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.266990 4685 generic.go:334] "Generic (PLEG): container finished" podID="9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9" containerID="3cdc8508c041bdf38c357253e8debc01cc52727db0ce2eacdd371c7ac4a9d77b" exitCode=0 Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.267046 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" event={"ID":"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9","Type":"ContainerDied","Data":"3cdc8508c041bdf38c357253e8debc01cc52727db0ce2eacdd371c7ac4a9d77b"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.274273 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" event={"ID":"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7","Type":"ContainerStarted","Data":"34aa400e004dd20ee6a7132d8e69419a8a6a77f15c3ada5713147ad00ba6fdf4"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.274323 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" event={"ID":"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7","Type":"ContainerStarted","Data":"55f5a270e3a7d79bba0c8b4eceb0147e5748aef1cc99e100974dc95a52623ea5"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.277138 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-9rrkd" event={"ID":"c77c98af-6f5d-416a-9ed1-09b2dd7dfeca","Type":"ContainerStarted","Data":"ff2da65ca819d2d3d30204446cccde1d4b482c3d70612eee319717e8b6d88077"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.315618 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-9rrkd" podStartSLOduration=5.3155982139999995 podStartE2EDuration="5.315598214s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:12.315419109 +0000 UTC m=+24.687193263" watchObservedRunningTime="2025-12-02 10:02:12.315598214 +0000 UTC m=+24.687372378" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.366406 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.366434 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.366443 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.366455 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.366464 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:12Z","lastTransitionTime":"2025-12-02T10:02:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.373542 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:12 crc kubenswrapper[4685]: E1202 10:02:12.373859 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 10:02:12 crc kubenswrapper[4685]: E1202 10:02:12.373936 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs podName:25987b61-91e8-4fbc-b90f-3a00533ef0b5 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:13.373916371 +0000 UTC m=+25.745690595 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs") pod "network-metrics-daemon-rs84c" (UID: "25987b61-91e8-4fbc-b90f-3a00533ef0b5") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.468443 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.468486 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.468499 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.468515 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.468527 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:12Z","lastTransitionTime":"2025-12-02T10:02:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.571389 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.571436 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.571449 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.571468 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.571481 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:12Z","lastTransitionTime":"2025-12-02T10:02:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.673497 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.673548 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.673575 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.673593 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.673604 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:12Z","lastTransitionTime":"2025-12-02T10:02:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.775888 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.775931 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.775941 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.775956 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.775976 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:12Z","lastTransitionTime":"2025-12-02T10:02:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.877957 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.877997 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.878008 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.878028 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.878040 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:12Z","lastTransitionTime":"2025-12-02T10:02:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.899400 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.899469 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:12 crc kubenswrapper[4685]: E1202 10:02:12.899542 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 10:02:12 crc kubenswrapper[4685]: E1202 10:02:12.899639 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.980371 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.980405 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.980416 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.980435 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:12 crc kubenswrapper[4685]: I1202 10:02:12.980448 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:12Z","lastTransitionTime":"2025-12-02T10:02:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.083200 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.083237 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.083246 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.083259 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.083268 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:13Z","lastTransitionTime":"2025-12-02T10:02:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.186833 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.186883 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.186900 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.186923 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.186935 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:13Z","lastTransitionTime":"2025-12-02T10:02:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.280904 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" event={"ID":"a4b6e5c9-624d-4c30-88d5-763b26bf2cd7","Type":"ContainerStarted","Data":"bd146df9d52b531dcec02c676bcc336137ba4499ce651bb1c8d9c7639adcda01"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.283420 4685 generic.go:334] "Generic (PLEG): container finished" podID="9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9" containerID="bfe143ffd7df68d0f2c801811ac82caaa741e175497b8e3ee17f33d0a2a07854" exitCode=0 Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.284315 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" event={"ID":"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9","Type":"ContainerDied","Data":"bfe143ffd7df68d0f2c801811ac82caaa741e175497b8e3ee17f33d0a2a07854"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.290429 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.290504 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.290516 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.290536 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.290548 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:13Z","lastTransitionTime":"2025-12-02T10:02:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.309074 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bn7k7" podStartSLOduration=5.309056002 podStartE2EDuration="5.309056002s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:13.30751987 +0000 UTC m=+25.679294044" watchObservedRunningTime="2025-12-02 10:02:13.309056002 +0000 UTC m=+25.680830156" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.382648 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:13 crc kubenswrapper[4685]: E1202 10:02:13.382923 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 10:02:13 crc kubenswrapper[4685]: E1202 10:02:13.382992 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs podName:25987b61-91e8-4fbc-b90f-3a00533ef0b5 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:15.382973584 +0000 UTC m=+27.754747818 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs") pod "network-metrics-daemon-rs84c" (UID: "25987b61-91e8-4fbc-b90f-3a00533ef0b5") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.396083 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.396116 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.396124 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.396138 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.396147 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:13Z","lastTransitionTime":"2025-12-02T10:02:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.498303 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.498375 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.498388 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.498403 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.498413 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:13Z","lastTransitionTime":"2025-12-02T10:02:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.600813 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.600856 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.600866 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.600882 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.600894 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:13Z","lastTransitionTime":"2025-12-02T10:02:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.703330 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.703363 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.703371 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.703386 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.703396 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:13Z","lastTransitionTime":"2025-12-02T10:02:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.806077 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.806124 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.806136 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.806154 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.806166 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:13Z","lastTransitionTime":"2025-12-02T10:02:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.899230 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.899270 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:13 crc kubenswrapper[4685]: E1202 10:02:13.899369 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 10:02:13 crc kubenswrapper[4685]: E1202 10:02:13.899471 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rs84c" podUID="25987b61-91e8-4fbc-b90f-3a00533ef0b5" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.907762 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.907802 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.907811 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.907829 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:13 crc kubenswrapper[4685]: I1202 10:02:13.907838 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:13Z","lastTransitionTime":"2025-12-02T10:02:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.010965 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.011295 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.011310 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.011329 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.011342 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:14Z","lastTransitionTime":"2025-12-02T10:02:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.114153 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.114194 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.114203 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.114220 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.114230 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:14Z","lastTransitionTime":"2025-12-02T10:02:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.216716 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.216747 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.216755 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.216768 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.216776 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:14Z","lastTransitionTime":"2025-12-02T10:02:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.289500 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerStarted","Data":"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.291875 4685 generic.go:334] "Generic (PLEG): container finished" podID="9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9" containerID="d044d104c82f8b7f648ccdb89fe35750eae8158556e8ebc8662bce15dbfc143b" exitCode=0 Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.292591 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" event={"ID":"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9","Type":"ContainerDied","Data":"d044d104c82f8b7f648ccdb89fe35750eae8158556e8ebc8662bce15dbfc143b"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.318537 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.318609 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.318620 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.318638 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.318648 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:14Z","lastTransitionTime":"2025-12-02T10:02:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.421032 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.421081 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.421092 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.421112 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.421124 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:14Z","lastTransitionTime":"2025-12-02T10:02:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.523815 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.523842 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.523850 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.523863 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.523871 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:14Z","lastTransitionTime":"2025-12-02T10:02:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.626235 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.626286 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.626298 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.626314 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.626325 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:14Z","lastTransitionTime":"2025-12-02T10:02:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.729104 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.729156 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.729168 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.729185 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.729198 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:14Z","lastTransitionTime":"2025-12-02T10:02:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.832027 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.832059 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.832067 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.832081 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.832090 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:14Z","lastTransitionTime":"2025-12-02T10:02:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.898688 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.898711 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:14 crc kubenswrapper[4685]: E1202 10:02:14.898835 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 10:02:14 crc kubenswrapper[4685]: E1202 10:02:14.898960 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.934484 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.934524 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.934535 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.934551 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:14 crc kubenswrapper[4685]: I1202 10:02:14.934576 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:14Z","lastTransitionTime":"2025-12-02T10:02:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.037785 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.037820 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.037829 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.037845 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.037854 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:15Z","lastTransitionTime":"2025-12-02T10:02:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.139789 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.139833 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.139848 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.139870 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.139880 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:15Z","lastTransitionTime":"2025-12-02T10:02:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.242091 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.242126 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.242137 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.242153 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.242168 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:15Z","lastTransitionTime":"2025-12-02T10:02:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.299251 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" event={"ID":"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9","Type":"ContainerStarted","Data":"a270bb016255fe71df4c8381794ba3b332f08c4069f2d10b888941d314204a6e"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.345127 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.345176 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.345187 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.345204 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.345214 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:15Z","lastTransitionTime":"2025-12-02T10:02:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.403829 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.404007 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.404822 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs podName:25987b61-91e8-4fbc-b90f-3a00533ef0b5 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:19.404797301 +0000 UTC m=+31.776571465 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs") pod "network-metrics-daemon-rs84c" (UID: "25987b61-91e8-4fbc-b90f-3a00533ef0b5") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.447416 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.447449 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.447459 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.447485 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.447496 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:15Z","lastTransitionTime":"2025-12-02T10:02:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.549576 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.549610 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.549618 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.549631 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.549641 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:15Z","lastTransitionTime":"2025-12-02T10:02:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.606615 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.606726 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.606748 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.606813 4685 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.606858 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:23.60684506 +0000 UTC m=+35.978619214 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.606893 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:23.606864231 +0000 UTC m=+35.978638385 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.606972 4685 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.607096 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:23.607068736 +0000 UTC m=+35.978842920 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.651404 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.651446 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.651454 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.651469 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.651478 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:15Z","lastTransitionTime":"2025-12-02T10:02:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.708439 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.708538 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.708639 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.708650 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.708663 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.708668 4685 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.708679 4685 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.708680 4685 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.708730 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:23.708713332 +0000 UTC m=+36.080487496 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.708750 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:23.708741453 +0000 UTC m=+36.080515617 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.755084 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.755419 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.755430 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.755446 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.755456 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:15Z","lastTransitionTime":"2025-12-02T10:02:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.857632 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.857705 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.857717 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.857739 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.857753 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:15Z","lastTransitionTime":"2025-12-02T10:02:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.899375 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.899433 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.899547 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rs84c" podUID="25987b61-91e8-4fbc-b90f-3a00533ef0b5" Dec 02 10:02:15 crc kubenswrapper[4685]: E1202 10:02:15.899665 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.960589 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.960621 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.960633 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.960650 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:15 crc kubenswrapper[4685]: I1202 10:02:15.960661 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:15Z","lastTransitionTime":"2025-12-02T10:02:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.063514 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.063551 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.063585 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.063607 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.063617 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:16Z","lastTransitionTime":"2025-12-02T10:02:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.166889 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.166922 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.166933 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.166946 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.166954 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:16Z","lastTransitionTime":"2025-12-02T10:02:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.268971 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.269067 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.269088 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.269113 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.269135 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:16Z","lastTransitionTime":"2025-12-02T10:02:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.290463 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.290503 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.290515 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.290533 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.290545 4685 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-02T10:02:16Z","lastTransitionTime":"2025-12-02T10:02:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.313574 4685 generic.go:334] "Generic (PLEG): container finished" podID="9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9" containerID="a270bb016255fe71df4c8381794ba3b332f08c4069f2d10b888941d314204a6e" exitCode=0 Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.313621 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" event={"ID":"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9","Type":"ContainerDied","Data":"a270bb016255fe71df4c8381794ba3b332f08c4069f2d10b888941d314204a6e"} Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.319524 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerStarted","Data":"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c"} Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.320380 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.320452 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.350587 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.352031 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd"] Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.352333 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.355313 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.357396 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.357527 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.358088 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.358849 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.376178 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" podStartSLOduration=9.376163149 podStartE2EDuration="9.376163149s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:16.375614563 +0000 UTC m=+28.747388727" watchObservedRunningTime="2025-12-02 10:02:16.376163149 +0000 UTC m=+28.747937303" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.415916 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7bba04a3-49bf-45be-848f-82d9861b57b9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.416013 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bba04a3-49bf-45be-848f-82d9861b57b9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.416037 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7bba04a3-49bf-45be-848f-82d9861b57b9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.416077 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7bba04a3-49bf-45be-848f-82d9861b57b9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.416515 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bba04a3-49bf-45be-848f-82d9861b57b9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.517572 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bba04a3-49bf-45be-848f-82d9861b57b9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.517802 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7bba04a3-49bf-45be-848f-82d9861b57b9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.517939 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bba04a3-49bf-45be-848f-82d9861b57b9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.518057 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7bba04a3-49bf-45be-848f-82d9861b57b9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.518160 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7bba04a3-49bf-45be-848f-82d9861b57b9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.517943 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7bba04a3-49bf-45be-848f-82d9861b57b9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.518486 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7bba04a3-49bf-45be-848f-82d9861b57b9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.518891 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/7bba04a3-49bf-45be-848f-82d9861b57b9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.521921 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bba04a3-49bf-45be-848f-82d9861b57b9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.532635 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bba04a3-49bf-45be-848f-82d9861b57b9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-snlmd\" (UID: \"7bba04a3-49bf-45be-848f-82d9861b57b9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.665975 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" Dec 02 10:02:16 crc kubenswrapper[4685]: W1202 10:02:16.683023 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7bba04a3_49bf_45be_848f_82d9861b57b9.slice/crio-ada103213977ac61dbcdcee0f4b4ce1518c1310056331741bdc22d943a38ef63 WatchSource:0}: Error finding container ada103213977ac61dbcdcee0f4b4ce1518c1310056331741bdc22d943a38ef63: Status 404 returned error can't find the container with id ada103213977ac61dbcdcee0f4b4ce1518c1310056331741bdc22d943a38ef63 Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.899256 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:16 crc kubenswrapper[4685]: I1202 10:02:16.899295 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:16 crc kubenswrapper[4685]: E1202 10:02:16.899471 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 10:02:16 crc kubenswrapper[4685]: E1202 10:02:16.899645 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 10:02:17 crc kubenswrapper[4685]: I1202 10:02:17.325441 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" event={"ID":"7bba04a3-49bf-45be-848f-82d9861b57b9","Type":"ContainerStarted","Data":"ada103213977ac61dbcdcee0f4b4ce1518c1310056331741bdc22d943a38ef63"} Dec 02 10:02:17 crc kubenswrapper[4685]: I1202 10:02:17.325529 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:02:17 crc kubenswrapper[4685]: I1202 10:02:17.899265 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:17 crc kubenswrapper[4685]: I1202 10:02:17.899342 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:17 crc kubenswrapper[4685]: E1202 10:02:17.900250 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rs84c" podUID="25987b61-91e8-4fbc-b90f-3a00533ef0b5" Dec 02 10:02:17 crc kubenswrapper[4685]: E1202 10:02:17.900429 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 10:02:18 crc kubenswrapper[4685]: I1202 10:02:18.332481 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" event={"ID":"9555ebe3-c169-45ad-97d0-3a7eb6b2b4b9","Type":"ContainerStarted","Data":"ffad8730e4544afa480057f45dcfbd32e16b1ea4caeb2db285f6f1abc8936206"} Dec 02 10:02:18 crc kubenswrapper[4685]: I1202 10:02:18.333617 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" event={"ID":"7bba04a3-49bf-45be-848f-82d9861b57b9","Type":"ContainerStarted","Data":"46db2b5224898f578596794ca816ed49a9af16871de10f2da0874d47f4e3cf21"} Dec 02 10:02:18 crc kubenswrapper[4685]: I1202 10:02:18.333691 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:02:18 crc kubenswrapper[4685]: I1202 10:02:18.356710 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-qp5vt" podStartSLOduration=11.356689471 podStartE2EDuration="11.356689471s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:18.355350925 +0000 UTC m=+30.727125089" watchObservedRunningTime="2025-12-02 10:02:18.356689471 +0000 UTC m=+30.728463645" Dec 02 10:02:18 crc kubenswrapper[4685]: I1202 10:02:18.509848 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-snlmd" podStartSLOduration=11.509829389 podStartE2EDuration="11.509829389s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:18.373305733 +0000 UTC m=+30.745079887" watchObservedRunningTime="2025-12-02 10:02:18.509829389 +0000 UTC m=+30.881603543" Dec 02 10:02:18 crc kubenswrapper[4685]: I1202 10:02:18.509999 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rs84c"] Dec 02 10:02:18 crc kubenswrapper[4685]: I1202 10:02:18.510122 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:18 crc kubenswrapper[4685]: E1202 10:02:18.510224 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rs84c" podUID="25987b61-91e8-4fbc-b90f-3a00533ef0b5" Dec 02 10:02:18 crc kubenswrapper[4685]: I1202 10:02:18.899691 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:18 crc kubenswrapper[4685]: E1202 10:02:18.899835 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 10:02:18 crc kubenswrapper[4685]: I1202 10:02:18.900186 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:18 crc kubenswrapper[4685]: E1202 10:02:18.900232 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 10:02:19 crc kubenswrapper[4685]: I1202 10:02:19.447431 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:19 crc kubenswrapper[4685]: E1202 10:02:19.447731 4685 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 10:02:19 crc kubenswrapper[4685]: E1202 10:02:19.447810 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs podName:25987b61-91e8-4fbc-b90f-3a00533ef0b5 nodeName:}" failed. No retries permitted until 2025-12-02 10:02:27.447780987 +0000 UTC m=+39.819555141 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs") pod "network-metrics-daemon-rs84c" (UID: "25987b61-91e8-4fbc-b90f-3a00533ef0b5") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 02 10:02:19 crc kubenswrapper[4685]: I1202 10:02:19.899524 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:19 crc kubenswrapper[4685]: I1202 10:02:19.899610 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:19 crc kubenswrapper[4685]: E1202 10:02:19.899690 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rs84c" podUID="25987b61-91e8-4fbc-b90f-3a00533ef0b5" Dec 02 10:02:19 crc kubenswrapper[4685]: E1202 10:02:19.900033 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 02 10:02:20 crc kubenswrapper[4685]: I1202 10:02:20.899352 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:20 crc kubenswrapper[4685]: I1202 10:02:20.899439 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:20 crc kubenswrapper[4685]: E1202 10:02:20.899541 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 02 10:02:20 crc kubenswrapper[4685]: E1202 10:02:20.899708 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.627952 4685 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.628161 4685 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.661000 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-5glrx"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.661683 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.668024 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.668532 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.669912 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.669975 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.670125 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.670357 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.670752 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.671666 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.672396 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.672898 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-ksvk4"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.673623 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.675836 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.676082 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.676707 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.677653 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.698491 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.700306 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.701738 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-t4sm7"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.715743 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.716138 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.717221 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.717627 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-m7vnk"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.717969 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-m7vnk" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.718120 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-hgnrz"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.718265 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.718474 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.720976 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721106 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721216 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721335 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721399 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721455 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721481 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721523 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721549 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721675 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721692 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721708 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721677 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721864 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.721985 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722001 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722106 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722127 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722149 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722106 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722131 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722250 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722316 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722326 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722342 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722383 4685 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722391 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722432 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722446 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722451 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722437 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722547 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722697 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722734 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.722823 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.723530 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vbfhw"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.724219 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.726488 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.733072 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-5glrx"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.734626 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mfk9b"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.735178 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.736137 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t5nfh"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.736522 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-t4sm7"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.736631 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.740953 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.742984 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.744069 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.744287 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.744350 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.744437 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.744613 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.744625 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.744674 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.744819 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.744844 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.744641 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745009 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745066 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745159 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745181 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745303 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745345 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745304 
4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745476 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745539 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745595 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.745849 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.746047 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.746275 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.746466 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.746651 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.746791 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.746914 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.747102 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.747204 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.747459 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.747711 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.747746 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.747784 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.747930 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.747723 4685 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.748222 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.748290 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.748425 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.748891 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.749060 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.750154 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.751395 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.751563 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.752995 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.753163 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.753480 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.753670 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.753744 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.753873 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.753909 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.758715 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.759359 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794176 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794228 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9sr6\" (UniqueName: \"kubernetes.io/projected/4e159366-bd40-43f3-9b3b-c818913c957a-kube-api-access-x9sr6\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794245 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794267 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/09cfde35-3539-4f7e-8985-9e0cd922f6e8-auth-proxy-config\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794287 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-config\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794302 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794316 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794333 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2dee0464-6eab-44a6-a33d-3b6096319ecf-images\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794346 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794363 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09cfde35-3539-4f7e-8985-9e0cd922f6e8-config\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794376 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794443 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-config\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794459 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6651d927-5e58-43b0-8c92-f425f6145e31-etcd-client\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794478 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-console-config\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794494 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2gt8\" (UniqueName: \"kubernetes.io/projected/12f6ce83-cb37-4e64-92c4-fb96aea3b213-kube-api-access-m2gt8\") pod \"cluster-samples-operator-665b6dd947-t84dj\" (UID: \"12f6ce83-cb37-4e64-92c4-fb96aea3b213\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794508 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/387d4a50-7633-485d-b8d3-ef2afc8c3c97-serving-cert\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794535 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/09cfde35-3539-4f7e-8985-9e0cd922f6e8-machine-approver-tls\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794549 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-image-import-ca\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794568 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1d639aa-af8d-40c6-bd5a-6543735cce4e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794584 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6651d927-5e58-43b0-8c92-f425f6145e31-serving-cert\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794613 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-trusted-ca-bundle\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794628 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-encryption-config\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794646 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1d639aa-af8d-40c6-bd5a-6543735cce4e-config\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794661 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvg54\" (UniqueName: \"kubernetes.io/projected/b1d639aa-af8d-40c6-bd5a-6543735cce4e-kube-api-access-zvg54\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794676 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52g4w\" (UniqueName: 
\"kubernetes.io/projected/984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6-kube-api-access-52g4w\") pod \"openshift-apiserver-operator-796bbdcf4f-qd6h4\" (UID: \"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794692 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-audit-policies\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794709 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/332dd167-8e86-4071-a277-547ab88bc15d-audit-dir\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794724 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dee0464-6eab-44a6-a33d-3b6096319ecf-config\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794740 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794759 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-audit-policies\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794776 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-client-ca\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794792 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-serving-cert\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794806 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn4hd\" (UniqueName: \"kubernetes.io/projected/2dee0464-6eab-44a6-a33d-3b6096319ecf-kube-api-access-sn4hd\") pod 
\"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794822 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c26cde94-5fa8-4f83-ba18-eef7033f6ec1-serving-cert\") pod \"openshift-config-operator-7777fb866f-fdsrt\" (UID: \"c26cde94-5fa8-4f83-ba18-eef7033f6ec1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794836 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-audit-dir\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794852 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-config\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794874 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-qd6h4\" (UID: \"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794890 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1d639aa-af8d-40c6-bd5a-6543735cce4e-service-ca-bundle\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794907 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfbr2\" (UniqueName: \"kubernetes.io/projected/8e3956cb-a0a9-4add-862c-b7facbcbf400-kube-api-access-wfbr2\") pod \"openshift-controller-manager-operator-756b6f6bc6-nvrwh\" (UID: \"8e3956cb-a0a9-4add-862c-b7facbcbf400\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794922 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjzxd\" (UniqueName: \"kubernetes.io/projected/60826f81-d0cb-4339-83b0-d40a5df1aff8-kube-api-access-pjzxd\") pod \"downloads-7954f5f757-m7vnk\" (UID: \"60826f81-d0cb-4339-83b0-d40a5df1aff8\") " pod="openshift-console/downloads-7954f5f757-m7vnk" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794939 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8e3956cb-a0a9-4add-862c-b7facbcbf400-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-nvrwh\" (UID: \"8e3956cb-a0a9-4add-862c-b7facbcbf400\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794961 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-service-ca\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.794984 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-trusted-ca-bundle\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.795001 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.795022 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e3956cb-a0a9-4add-862c-b7facbcbf400-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-nvrwh\" (UID: \"8e3956cb-a0a9-4add-862c-b7facbcbf400\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797008 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797122 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vr7n\" (UniqueName: \"kubernetes.io/projected/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-kube-api-access-7vr7n\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797159 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/2dee0464-6eab-44a6-a33d-3b6096319ecf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797195 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797217 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/12f6ce83-cb37-4e64-92c4-fb96aea3b213-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-t84dj\" (UID: \"12f6ce83-cb37-4e64-92c4-fb96aea3b213\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797237 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz8sk\" (UniqueName: \"kubernetes.io/projected/c26cde94-5fa8-4f83-ba18-eef7033f6ec1-kube-api-access-sz8sk\") pod \"openshift-config-operator-7777fb866f-fdsrt\" (UID: \"c26cde94-5fa8-4f83-ba18-eef7033f6ec1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797264 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797284 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797302 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpd9r\" (UniqueName: \"kubernetes.io/projected/387d4a50-7633-485d-b8d3-ef2afc8c3c97-kube-api-access-wpd9r\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797328 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-qd6h4\" (UID: \"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797349 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-trusted-ca\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797387 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/589484c8-ce97-4e9e-b17a-58af833d9915-serving-cert\") pod 
\"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797410 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-etcd-client\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797427 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-etcd-serving-ca\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797447 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6651d927-5e58-43b0-8c92-f425f6145e31-audit-dir\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797466 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5qxh\" (UniqueName: \"kubernetes.io/projected/6651d927-5e58-43b0-8c92-f425f6145e31-kube-api-access-j5qxh\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797486 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-oauth-serving-cert\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.797505 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68td8\" (UniqueName: \"kubernetes.io/projected/332dd167-8e86-4071-a277-547ab88bc15d-kube-api-access-68td8\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.803655 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.804249 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.804373 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-tkpq4"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.804913 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.805227 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.806998 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.808234 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.809085 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-mpwz8"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.809427 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fcrk7"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.809857 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.810727 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.810974 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.811815 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-client-ca\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.811885 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-audit\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.811903 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6651d927-5e58-43b0-8c92-f425f6145e31-encryption-config\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.811928 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz4rz\" (UniqueName: \"kubernetes.io/projected/09cfde35-3539-4f7e-8985-9e0cd922f6e8-kube-api-access-zz4rz\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.811948 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78qwp\" (UniqueName: 
\"kubernetes.io/projected/589484c8-ce97-4e9e-b17a-58af833d9915-kube-api-access-78qwp\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.811968 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6651d927-5e58-43b0-8c92-f425f6145e31-node-pullsecrets\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812029 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c26cde94-5fa8-4f83-ba18-eef7033f6ec1-available-featuregates\") pod \"openshift-config-operator-7777fb866f-fdsrt\" (UID: \"c26cde94-5fa8-4f83-ba18-eef7033f6ec1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812055 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-oauth-config\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812078 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-serving-cert\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812099 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2gfg\" (UniqueName: \"kubernetes.io/projected/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-kube-api-access-n2gfg\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812114 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-config\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812131 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-serving-cert\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812147 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-serving-cert\") pod 
\"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812165 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1d639aa-af8d-40c6-bd5a-6543735cce4e-serving-cert\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812194 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812210 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.812535 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.817995 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.818093 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.818271 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.819834 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nr46b"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.820545 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.821526 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.824205 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-jmz9s"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.825820 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.825949 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.827194 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.850859 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.851397 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.853472 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-269n4"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.854916 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9rnjd"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.855014 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.855587 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.855808 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-269n4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.873858 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.874395 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.874690 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.875077 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.875668 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-vwpld"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.876352 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.876634 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-96htj"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.877046 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.877233 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.877772 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.878442 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.878643 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.879036 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.879168 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.879602 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.880623 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.881157 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.884819 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.885546 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.885859 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.885961 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.886827 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.895036 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.895092 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-m7vnk"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.899149 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.899612 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.900035 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.904145 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.904201 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.906891 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.908503 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-ksvk4"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.909002 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.913705 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.914724 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.914802 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9rnjd"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.915293 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c26cde94-5fa8-4f83-ba18-eef7033f6ec1-serving-cert\") pod \"openshift-config-operator-7777fb866f-fdsrt\" (UID: \"c26cde94-5fa8-4f83-ba18-eef7033f6ec1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.915374 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-audit-dir\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.915442 4685 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-config\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.915503 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-serving-cert\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.915565 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn4hd\" (UniqueName: \"kubernetes.io/projected/2dee0464-6eab-44a6-a33d-3b6096319ecf-kube-api-access-sn4hd\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.915670 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-qd6h4\" (UID: \"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.915768 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/aea099c8-3890-4ce5-9965-10f136542d87-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.915836 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74cnc\" (UniqueName: \"kubernetes.io/projected/ae20ffd8-2f39-4fcd-a17e-0ae889c03d23-kube-api-access-74cnc\") pod \"machine-config-server-mpwz8\" (UID: \"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23\") " pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.915901 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfbr2\" (UniqueName: \"kubernetes.io/projected/8e3956cb-a0a9-4add-862c-b7facbcbf400-kube-api-access-wfbr2\") pod \"openshift-controller-manager-operator-756b6f6bc6-nvrwh\" (UID: \"8e3956cb-a0a9-4add-862c-b7facbcbf400\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.915966 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1d639aa-af8d-40c6-bd5a-6543735cce4e-service-ca-bundle\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916048 4685 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-pjzxd\" (UniqueName: \"kubernetes.io/projected/60826f81-d0cb-4339-83b0-d40a5df1aff8-kube-api-access-pjzxd\") pod \"downloads-7954f5f757-m7vnk\" (UID: \"60826f81-d0cb-4339-83b0-d40a5df1aff8\") " pod="openshift-console/downloads-7954f5f757-m7vnk" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916120 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e3956cb-a0a9-4add-862c-b7facbcbf400-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-nvrwh\" (UID: \"8e3956cb-a0a9-4add-862c-b7facbcbf400\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916194 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-service-ca\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916257 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6136cbbe-cd55-4ba7-829a-621906a4b7b7-trusted-ca\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916326 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-trusted-ca-bundle\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916391 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916452 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e3956cb-a0a9-4add-862c-b7facbcbf400-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-nvrwh\" (UID: \"8e3956cb-a0a9-4add-862c-b7facbcbf400\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916514 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7tsf\" (UniqueName: \"kubernetes.io/projected/de06fcee-1542-419a-87ae-cb854b6b4a5e-kube-api-access-d7tsf\") pod \"ingress-canary-269n4\" (UID: \"de06fcee-1542-419a-87ae-cb854b6b4a5e\") " pod="openshift-ingress-canary/ingress-canary-269n4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916619 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vr7n\" (UniqueName: \"kubernetes.io/projected/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-kube-api-access-7vr7n\") pod 
\"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916695 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/2dee0464-6eab-44a6-a33d-3b6096319ecf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916758 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916824 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/12f6ce83-cb37-4e64-92c4-fb96aea3b213-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-t84dj\" (UID: \"12f6ce83-cb37-4e64-92c4-fb96aea3b213\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.916885 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfmhp\" (UniqueName: \"kubernetes.io/projected/4135e4b9-a4ab-4961-8dfa-7c0e83fa1970-kube-api-access-gfmhp\") pod \"service-ca-9c57cc56f-nr46b\" (UID: \"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970\") " pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917015 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpd9r\" (UniqueName: \"kubernetes.io/projected/387d4a50-7633-485d-b8d3-ef2afc8c3c97-kube-api-access-wpd9r\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917080 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-qd6h4\" (UID: \"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917146 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz8sk\" (UniqueName: \"kubernetes.io/projected/c26cde94-5fa8-4f83-ba18-eef7033f6ec1-kube-api-access-sz8sk\") pod \"openshift-config-operator-7777fb866f-fdsrt\" (UID: \"c26cde94-5fa8-4f83-ba18-eef7033f6ec1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917209 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: 
\"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917272 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917339 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-trusted-ca\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917407 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/589484c8-ce97-4e9e-b17a-58af833d9915-serving-cert\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917469 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-etcd-client\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917530 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-etcd-serving-ca\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917619 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6651d927-5e58-43b0-8c92-f425f6145e31-audit-dir\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917701 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ae20ffd8-2f39-4fcd-a17e-0ae889c03d23-certs\") pod \"machine-config-server-mpwz8\" (UID: \"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23\") " pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917767 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5qxh\" (UniqueName: \"kubernetes.io/projected/6651d927-5e58-43b0-8c92-f425f6145e31-kube-api-access-j5qxh\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917828 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6136cbbe-cd55-4ba7-829a-621906a4b7b7-metrics-tls\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917891 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-audit\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.917962 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6651d927-5e58-43b0-8c92-f425f6145e31-encryption-config\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918030 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-oauth-serving-cert\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918092 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68td8\" (UniqueName: \"kubernetes.io/projected/332dd167-8e86-4071-a277-547ab88bc15d-kube-api-access-68td8\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918160 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-client-ca\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918298 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz4rz\" (UniqueName: \"kubernetes.io/projected/09cfde35-3539-4f7e-8985-9e0cd922f6e8-kube-api-access-zz4rz\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918369 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/096d8145-ede4-4af3-a326-7a1739ca1dc4-default-certificate\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918445 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4135e4b9-a4ab-4961-8dfa-7c0e83fa1970-signing-cabundle\") pod \"service-ca-9c57cc56f-nr46b\" (UID: \"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918534 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78qwp\" (UniqueName: \"kubernetes.io/projected/589484c8-ce97-4e9e-b17a-58af833d9915-kube-api-access-78qwp\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918618 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6651d927-5e58-43b0-8c92-f425f6145e31-node-pullsecrets\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918687 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wwb8\" (UniqueName: \"kubernetes.io/projected/aea099c8-3890-4ce5-9965-10f136542d87-kube-api-access-4wwb8\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918770 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c26cde94-5fa8-4f83-ba18-eef7033f6ec1-available-featuregates\") pod \"openshift-config-operator-7777fb866f-fdsrt\" (UID: \"c26cde94-5fa8-4f83-ba18-eef7033f6ec1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918853 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/096d8145-ede4-4af3-a326-7a1739ca1dc4-metrics-certs\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.918931 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-config\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919000 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/096d8145-ede4-4af3-a326-7a1739ca1dc4-service-ca-bundle\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919067 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2655a11a-813f-40db-9e51-33122afc460a-metrics-tls\") pod \"dns-operator-744455d44c-jmz9s\" (UID: \"2655a11a-813f-40db-9e51-33122afc460a\") " pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919136 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-oauth-config\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919201 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-serving-cert\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919303 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2gfg\" (UniqueName: \"kubernetes.io/projected/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-kube-api-access-n2gfg\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919372 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919473 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1d639aa-af8d-40c6-bd5a-6543735cce4e-serving-cert\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919549 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-serving-cert\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919664 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919755 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmm8h\" (UniqueName: \"kubernetes.io/projected/096d8145-ede4-4af3-a326-7a1739ca1dc4-kube-api-access-rmm8h\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919306 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-audit-dir\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: 
\"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919331 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.920042 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nr46b"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.919606 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6-config\") pod \"openshift-apiserver-operator-796bbdcf4f-qd6h4\" (UID: \"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.920237 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1d639aa-af8d-40c6-bd5a-6543735cce4e-service-ca-bundle\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.920723 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-config\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.921464 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-service-ca\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.921691 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c26cde94-5fa8-4f83-ba18-eef7033f6ec1-serving-cert\") pod \"openshift-config-operator-7777fb866f-fdsrt\" (UID: \"c26cde94-5fa8-4f83-ba18-eef7033f6ec1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.923098 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-serving-cert\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.923171 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-trusted-ca-bundle\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.923463 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/6651d927-5e58-43b0-8c92-f425f6145e31-audit-dir\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.924180 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e3956cb-a0a9-4add-862c-b7facbcbf400-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-nvrwh\" (UID: \"8e3956cb-a0a9-4add-862c-b7facbcbf400\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.926852 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-trusted-ca\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.927020 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-etcd-serving-ca\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.927169 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6651d927-5e58-43b0-8c92-f425f6145e31-node-pullsecrets\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.927301 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-hgnrz"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.927360 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vbfhw"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.927477 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-audit\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.927780 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c26cde94-5fa8-4f83-ba18-eef7033f6ec1-available-featuregates\") pod \"openshift-config-operator-7777fb866f-fdsrt\" (UID: \"c26cde94-5fa8-4f83-ba18-eef7033f6ec1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.928509 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-config\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.929059 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.929975 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6651d927-5e58-43b0-8c92-f425f6145e31-encryption-config\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.930633 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-oauth-serving-cert\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.931555 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-client-ca\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.934541 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-oauth-config\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.934836 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.935468 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e3956cb-a0a9-4add-862c-b7facbcbf400-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-nvrwh\" (UID: \"8e3956cb-a0a9-4add-862c-b7facbcbf400\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.935651 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/2dee0464-6eab-44a6-a33d-3b6096319ecf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.935712 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.938431 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.938477 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aea099c8-3890-4ce5-9965-10f136542d87-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.939216 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9sr6\" (UniqueName: \"kubernetes.io/projected/4e159366-bd40-43f3-9b3b-c818913c957a-kube-api-access-x9sr6\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.939270 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.939297 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/09cfde35-3539-4f7e-8985-9e0cd922f6e8-auth-proxy-config\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.939325 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/096d8145-ede4-4af3-a326-7a1739ca1dc4-stats-auth\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.939351 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.939556 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-etcd-client\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.939600 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-idp-0-file-data\") pod 
\"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.940989 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.941068 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdh84\" (UniqueName: \"kubernetes.io/projected/6136cbbe-cd55-4ba7-829a-621906a4b7b7-kube-api-access-tdh84\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.941370 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.941682 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.941763 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-serving-cert\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.942185 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/589484c8-ce97-4e9e-b17a-58af833d9915-serving-cert\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.942278 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.942621 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mfk9b"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.942737 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp"] Dec 02 10:02:21 crc 
kubenswrapper[4685]: I1202 10:02:21.942889 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/09cfde35-3539-4f7e-8985-9e0cd922f6e8-auth-proxy-config\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943287 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943002 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943655 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1d639aa-af8d-40c6-bd5a-6543735cce4e-serving-cert\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943787 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943865 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-config\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943898 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/de06fcee-1542-419a-87ae-cb854b6b4a5e-cert\") pod \"ingress-canary-269n4\" (UID: \"de06fcee-1542-419a-87ae-cb854b6b4a5e\") " pod="openshift-ingress-canary/ingress-canary-269n4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943925 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7jpv\" (UniqueName: \"kubernetes.io/projected/2655a11a-813f-40db-9e51-33122afc460a-kube-api-access-v7jpv\") pod \"dns-operator-744455d44c-jmz9s\" (UID: \"2655a11a-813f-40db-9e51-33122afc460a\") " pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943949 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2dee0464-6eab-44a6-a33d-3b6096319ecf-images\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943969 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943986 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-config\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944003 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6651d927-5e58-43b0-8c92-f425f6145e31-etcd-client\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944022 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09cfde35-3539-4f7e-8985-9e0cd922f6e8-config\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944038 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944055 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-console-config\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944072 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2gt8\" (UniqueName: \"kubernetes.io/projected/12f6ce83-cb37-4e64-92c4-fb96aea3b213-kube-api-access-m2gt8\") pod \"cluster-samples-operator-665b6dd947-t84dj\" (UID: \"12f6ce83-cb37-4e64-92c4-fb96aea3b213\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944088 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/387d4a50-7633-485d-b8d3-ef2afc8c3c97-serving-cert\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944109 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/09cfde35-3539-4f7e-8985-9e0cd922f6e8-machine-approver-tls\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944124 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-image-import-ca\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944141 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1d639aa-af8d-40c6-bd5a-6543735cce4e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944199 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aea099c8-3890-4ce5-9965-10f136542d87-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944215 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-encryption-config\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: 
\"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944230 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6651d927-5e58-43b0-8c92-f425f6145e31-serving-cert\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944246 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-trusted-ca-bundle\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944260 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6136cbbe-cd55-4ba7-829a-621906a4b7b7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944276 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1d639aa-af8d-40c6-bd5a-6543735cce4e-config\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944291 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvg54\" (UniqueName: \"kubernetes.io/projected/b1d639aa-af8d-40c6-bd5a-6543735cce4e-kube-api-access-zvg54\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944306 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52g4w\" (UniqueName: \"kubernetes.io/projected/984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6-kube-api-access-52g4w\") pod \"openshift-apiserver-operator-796bbdcf4f-qd6h4\" (UID: \"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944325 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-audit-policies\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944339 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/332dd167-8e86-4071-a277-547ab88bc15d-audit-dir\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944355 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ae20ffd8-2f39-4fcd-a17e-0ae889c03d23-node-bootstrap-token\") pod \"machine-config-server-mpwz8\" (UID: \"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23\") " pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944378 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dee0464-6eab-44a6-a33d-3b6096319ecf-config\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944393 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944408 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-audit-policies\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944427 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-client-ca\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.944442 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4135e4b9-a4ab-4961-8dfa-7c0e83fa1970-signing-key\") pod \"service-ca-9c57cc56f-nr46b\" (UID: \"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970\") " pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.943814 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.945011 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.945230 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-config\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.945264 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-qd6h4\" (UID: \"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.945286 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.945823 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-console-config\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.945831 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.946361 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/12f6ce83-cb37-4e64-92c4-fb96aea3b213-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-t84dj\" (UID: \"12f6ce83-cb37-4e64-92c4-fb96aea3b213\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.946601 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.946790 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-audit-policies\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.947824 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/2dee0464-6eab-44a6-a33d-3b6096319ecf-images\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.947922 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.948249 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/332dd167-8e86-4071-a277-547ab88bc15d-audit-dir\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.948279 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-config\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.948981 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2dee0464-6eab-44a6-a33d-3b6096319ecf-config\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.949396 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09cfde35-3539-4f7e-8985-9e0cd922f6e8-config\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.949731 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-image-import-ca\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.949891 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-audit-policies\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.949982 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-client-ca\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.950384 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.950396 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-session\") pod 
\"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.950435 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.950577 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.950773 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-serving-cert\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.951062 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.951137 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1d639aa-af8d-40c6-bd5a-6543735cce4e-config\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.952041 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6651d927-5e58-43b0-8c92-f425f6145e31-trusted-ca-bundle\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.952315 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/387d4a50-7633-485d-b8d3-ef2afc8c3c97-serving-cert\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.952353 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.953159 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.960809 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6651d927-5e58-43b0-8c92-f425f6145e31-serving-cert\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.961168 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.961468 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6651d927-5e58-43b0-8c92-f425f6145e31-etcd-client\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.961887 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-encryption-config\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.962165 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.962950 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.965182 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t5nfh"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.966165 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kj84m"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.972547 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.973536 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xf986"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.974075 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.982360 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1d639aa-af8d-40c6-bd5a-6543735cce4e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.982480 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fcrk7"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.982501 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.982512 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.983015 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/09cfde35-3539-4f7e-8985-9e0cd922f6e8-machine-approver-tls\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.983149 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.983169 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.983243 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.983402 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.984134 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.985287 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.986482 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.987530 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.988396 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.988398 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-28vxg"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.990058 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.990129 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.991806 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.992808 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-269n4"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.993882 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.995232 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-vwpld"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.996250 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xf986"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.997108 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.998064 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc"] Dec 02 10:02:21 crc kubenswrapper[4685]: I1202 10:02:21.999041 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-jmz9s"] Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.000378 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kj84m"] Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.001701 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg"] Dec 02 10:02:22 crc 
kubenswrapper[4685]: I1202 10:02:22.002943 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf"] Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.003522 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.004097 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-28vxg"] Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.023701 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.043876 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.044998 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/de06fcee-1542-419a-87ae-cb854b6b4a5e-cert\") pod \"ingress-canary-269n4\" (UID: \"de06fcee-1542-419a-87ae-cb854b6b4a5e\") " pod="openshift-ingress-canary/ingress-canary-269n4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045155 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7jpv\" (UniqueName: \"kubernetes.io/projected/2655a11a-813f-40db-9e51-33122afc460a-kube-api-access-v7jpv\") pod \"dns-operator-744455d44c-jmz9s\" (UID: \"2655a11a-813f-40db-9e51-33122afc460a\") " pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045204 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aea099c8-3890-4ce5-9965-10f136542d87-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045223 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6136cbbe-cd55-4ba7-829a-621906a4b7b7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045251 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ae20ffd8-2f39-4fcd-a17e-0ae889c03d23-node-bootstrap-token\") pod \"machine-config-server-mpwz8\" (UID: \"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23\") " pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045271 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4135e4b9-a4ab-4961-8dfa-7c0e83fa1970-signing-key\") pod \"service-ca-9c57cc56f-nr46b\" (UID: \"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970\") " pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045301 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/aea099c8-3890-4ce5-9965-10f136542d87-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045323 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74cnc\" (UniqueName: \"kubernetes.io/projected/ae20ffd8-2f39-4fcd-a17e-0ae889c03d23-kube-api-access-74cnc\") pod \"machine-config-server-mpwz8\" (UID: \"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23\") " pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045379 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6136cbbe-cd55-4ba7-829a-621906a4b7b7-trusted-ca\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045398 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7tsf\" (UniqueName: \"kubernetes.io/projected/de06fcee-1542-419a-87ae-cb854b6b4a5e-kube-api-access-d7tsf\") pod \"ingress-canary-269n4\" (UID: \"de06fcee-1542-419a-87ae-cb854b6b4a5e\") " pod="openshift-ingress-canary/ingress-canary-269n4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045421 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfmhp\" (UniqueName: \"kubernetes.io/projected/4135e4b9-a4ab-4961-8dfa-7c0e83fa1970-kube-api-access-gfmhp\") pod \"service-ca-9c57cc56f-nr46b\" (UID: \"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970\") " pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045451 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6136cbbe-cd55-4ba7-829a-621906a4b7b7-metrics-tls\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045466 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ae20ffd8-2f39-4fcd-a17e-0ae889c03d23-certs\") pod \"machine-config-server-mpwz8\" (UID: \"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23\") " pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045501 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/096d8145-ede4-4af3-a326-7a1739ca1dc4-default-certificate\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045518 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wwb8\" (UniqueName: \"kubernetes.io/projected/aea099c8-3890-4ce5-9965-10f136542d87-kube-api-access-4wwb8\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045538 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4135e4b9-a4ab-4961-8dfa-7c0e83fa1970-signing-cabundle\") pod \"service-ca-9c57cc56f-nr46b\" (UID: \"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970\") " pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045587 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/096d8145-ede4-4af3-a326-7a1739ca1dc4-metrics-certs\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045634 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/096d8145-ede4-4af3-a326-7a1739ca1dc4-service-ca-bundle\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045656 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2655a11a-813f-40db-9e51-33122afc460a-metrics-tls\") pod \"dns-operator-744455d44c-jmz9s\" (UID: \"2655a11a-813f-40db-9e51-33122afc460a\") " pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045690 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmm8h\" (UniqueName: \"kubernetes.io/projected/096d8145-ede4-4af3-a326-7a1739ca1dc4-kube-api-access-rmm8h\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045713 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aea099c8-3890-4ce5-9965-10f136542d87-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045736 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/096d8145-ede4-4af3-a326-7a1739ca1dc4-stats-auth\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.045766 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdh84\" (UniqueName: \"kubernetes.io/projected/6136cbbe-cd55-4ba7-829a-621906a4b7b7-kube-api-access-tdh84\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.047280 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/096d8145-ede4-4af3-a326-7a1739ca1dc4-service-ca-bundle\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.048005 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/aea099c8-3890-4ce5-9965-10f136542d87-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.048265 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6136cbbe-cd55-4ba7-829a-621906a4b7b7-trusted-ca\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.050212 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/096d8145-ede4-4af3-a326-7a1739ca1dc4-metrics-certs\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.050730 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/096d8145-ede4-4af3-a326-7a1739ca1dc4-stats-auth\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.051980 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/096d8145-ede4-4af3-a326-7a1739ca1dc4-default-certificate\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.070093 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.085126 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.105217 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.124135 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.129202 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6136cbbe-cd55-4ba7-829a-621906a4b7b7-metrics-tls\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.144230 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" 
Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.155138 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ae20ffd8-2f39-4fcd-a17e-0ae889c03d23-node-bootstrap-token\") pod \"machine-config-server-mpwz8\" (UID: \"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23\") " pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.163825 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.183691 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.189483 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ae20ffd8-2f39-4fcd-a17e-0ae889c03d23-certs\") pod \"machine-config-server-mpwz8\" (UID: \"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23\") " pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.224778 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.243692 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.263803 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.277776 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4135e4b9-a4ab-4961-8dfa-7c0e83fa1970-signing-key\") pod \"service-ca-9c57cc56f-nr46b\" (UID: \"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970\") " pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.283718 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.288153 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4135e4b9-a4ab-4961-8dfa-7c0e83fa1970-signing-cabundle\") pod \"service-ca-9c57cc56f-nr46b\" (UID: \"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970\") " pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.304625 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.324477 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.345299 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.363654 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.370570 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/2655a11a-813f-40db-9e51-33122afc460a-metrics-tls\") pod \"dns-operator-744455d44c-jmz9s\" (UID: \"2655a11a-813f-40db-9e51-33122afc460a\") " pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.384681 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.424790 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.429289 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/aea099c8-3890-4ce5-9965-10f136542d87-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.444522 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.464190 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.483940 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.505028 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.524946 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.545103 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.565091 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.584073 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.604586 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.624473 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.643607 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.664524 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.684445 4685 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-operator-config" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.704088 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.724413 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.743884 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.764730 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.770763 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/de06fcee-1542-419a-87ae-cb854b6b4a5e-cert\") pod \"ingress-canary-269n4\" (UID: \"de06fcee-1542-419a-87ae-cb854b6b4a5e\") " pod="openshift-ingress-canary/ingress-canary-269n4" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.785345 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.804777 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.824867 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.845056 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-sysctl-allowlist" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.864646 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.882290 4685 request.go:700] Waited for 1.004260643s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dcollect-profiles-dockercfg-kzf4t&limit=500&resourceVersion=0 Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.884020 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.899488 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.899488 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.904314 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.924194 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.944752 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.964848 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 10:02:22 crc kubenswrapper[4685]: I1202 10:02:22.984467 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.004249 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.024167 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.044176 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.064350 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.084649 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.104336 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.123339 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.144183 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.164937 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.184031 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.204665 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.224989 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 10:02:23 crc kubenswrapper[4685]: 
I1202 10:02:23.244543 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.263315 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.283826 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.284080 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.284638 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.300220 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.305021 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.345739 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz8sk\" (UniqueName: \"kubernetes.io/projected/c26cde94-5fa8-4f83-ba18-eef7033f6ec1-kube-api-access-sz8sk\") pod \"openshift-config-operator-7777fb866f-fdsrt\" (UID: \"c26cde94-5fa8-4f83-ba18-eef7033f6ec1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.347683 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.367917 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn4hd\" (UniqueName: \"kubernetes.io/projected/2dee0464-6eab-44a6-a33d-3b6096319ecf-kube-api-access-sn4hd\") pod \"machine-api-operator-5694c8668f-5glrx\" (UID: \"2dee0464-6eab-44a6-a33d-3b6096319ecf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.386276 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfbr2\" (UniqueName: \"kubernetes.io/projected/8e3956cb-a0a9-4add-862c-b7facbcbf400-kube-api-access-wfbr2\") pod \"openshift-controller-manager-operator-756b6f6bc6-nvrwh\" (UID: \"8e3956cb-a0a9-4add-862c-b7facbcbf400\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.405154 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjzxd\" (UniqueName: \"kubernetes.io/projected/60826f81-d0cb-4339-83b0-d40a5df1aff8-kube-api-access-pjzxd\") pod \"downloads-7954f5f757-m7vnk\" (UID: \"60826f81-d0cb-4339-83b0-d40a5df1aff8\") " pod="openshift-console/downloads-7954f5f757-m7vnk" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.418481 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vr7n\" (UniqueName: \"kubernetes.io/projected/492f18b2-8ffc-4f93-a2de-6bf204e1a4f3-kube-api-access-7vr7n\") pod \"console-operator-58897d9998-mfk9b\" (UID: \"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3\") " 
pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.440288 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78qwp\" (UniqueName: \"kubernetes.io/projected/589484c8-ce97-4e9e-b17a-58af833d9915-kube-api-access-78qwp\") pod \"route-controller-manager-6576b87f9c-cz9bg\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.461545 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68td8\" (UniqueName: \"kubernetes.io/projected/332dd167-8e86-4071-a277-547ab88bc15d-kube-api-access-68td8\") pod \"oauth-openshift-558db77b4-vbfhw\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.476632 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz4rz\" (UniqueName: \"kubernetes.io/projected/09cfde35-3539-4f7e-8985-9e0cd922f6e8-kube-api-access-zz4rz\") pod \"machine-approver-56656f9798-n64h8\" (UID: \"09cfde35-3539-4f7e-8985-9e0cd922f6e8\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.486360 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.498830 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.500240 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.502558 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.505994 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5qxh\" (UniqueName: \"kubernetes.io/projected/6651d927-5e58-43b0-8c92-f425f6145e31-kube-api-access-j5qxh\") pod \"apiserver-76f77b778f-ksvk4\" (UID: \"6651d927-5e58-43b0-8c92-f425f6145e31\") " pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.516566 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2gfg\" (UniqueName: \"kubernetes.io/projected/0a30ded0-7ce0-4ed4-9e40-9266f1472af4-kube-api-access-n2gfg\") pod \"apiserver-7bbb656c7d-z9vhz\" (UID: \"0a30ded0-7ce0-4ed4-9e40-9266f1472af4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.532028 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.537388 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpd9r\" (UniqueName: \"kubernetes.io/projected/387d4a50-7633-485d-b8d3-ef2afc8c3c97-kube-api-access-wpd9r\") pod \"controller-manager-879f6c89f-t5nfh\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.558310 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9sr6\" (UniqueName: \"kubernetes.io/projected/4e159366-bd40-43f3-9b3b-c818913c957a-kube-api-access-x9sr6\") pod \"console-f9d7485db-hgnrz\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.564305 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.583727 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.596219 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.603245 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.603524 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.624068 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.644038 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.666626 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.666777 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.666801 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:23 crc kubenswrapper[4685]: E1202 10:02:23.667106 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:39.667080611 +0000 UTC m=+52.038854765 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.679063 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2gt8\" (UniqueName: \"kubernetes.io/projected/12f6ce83-cb37-4e64-92c4-fb96aea3b213-kube-api-access-m2gt8\") pod \"cluster-samples-operator-665b6dd947-t84dj\" (UID: \"12f6ce83-cb37-4e64-92c4-fb96aea3b213\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.684487 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.695324 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.701883 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-m7vnk" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.703654 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.720024 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.724320 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.743663 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.763602 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.768140 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.768307 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.771637 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.771929 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.801457 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvg54\" (UniqueName: \"kubernetes.io/projected/b1d639aa-af8d-40c6-bd5a-6543735cce4e-kube-api-access-zvg54\") pod \"authentication-operator-69f744f599-t4sm7\" (UID: \"b1d639aa-af8d-40c6-bd5a-6543735cce4e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.813873 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.819568 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.820902 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52g4w\" (UniqueName: \"kubernetes.io/projected/984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6-kube-api-access-52g4w\") pod \"openshift-apiserver-operator-796bbdcf4f-qd6h4\" (UID: \"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.824286 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.844757 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.846943 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.866345 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.883081 4685 request.go:700] Waited for 1.919894712s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dolm-operator-serving-cert&limit=500&resourceVersion=0 Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.884874 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.904189 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.927369 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.945363 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.964483 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.984937 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 02 10:02:23 crc kubenswrapper[4685]: I1202 10:02:23.985264 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.005335 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.010366 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.016317 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.034958 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.046010 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.065366 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.084533 4685 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.109530 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.145250 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74cnc\" (UniqueName: \"kubernetes.io/projected/ae20ffd8-2f39-4fcd-a17e-0ae889c03d23-kube-api-access-74cnc\") pod \"machine-config-server-mpwz8\" (UID: \"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23\") " pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.145627 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-mpwz8" Dec 02 10:02:24 crc kubenswrapper[4685]: W1202 10:02:24.164041 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09cfde35_3539_4f7e_8985_9e0cd922f6e8.slice/crio-e1cadd51e57f9889dae27178ce0f74c7e2b7da04b9ceacb10736abdeb714ac68 WatchSource:0}: Error finding container e1cadd51e57f9889dae27178ce0f74c7e2b7da04b9ceacb10736abdeb714ac68: Status 404 returned error can't find the container with id e1cadd51e57f9889dae27178ce0f74c7e2b7da04b9ceacb10736abdeb714ac68 Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.187142 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7jpv\" (UniqueName: \"kubernetes.io/projected/2655a11a-813f-40db-9e51-33122afc460a-kube-api-access-v7jpv\") pod \"dns-operator-744455d44c-jmz9s\" (UID: \"2655a11a-813f-40db-9e51-33122afc460a\") " pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.200730 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/aea099c8-3890-4ce5-9965-10f136542d87-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.207835 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6136cbbe-cd55-4ba7-829a-621906a4b7b7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: 
\"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.223666 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdh84\" (UniqueName: \"kubernetes.io/projected/6136cbbe-cd55-4ba7-829a-621906a4b7b7-kube-api-access-tdh84\") pod \"ingress-operator-5b745b69d9-6mj6f\" (UID: \"6136cbbe-cd55-4ba7-829a-621906a4b7b7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.239861 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7tsf\" (UniqueName: \"kubernetes.io/projected/de06fcee-1542-419a-87ae-cb854b6b4a5e-kube-api-access-d7tsf\") pod \"ingress-canary-269n4\" (UID: \"de06fcee-1542-419a-87ae-cb854b6b4a5e\") " pod="openshift-ingress-canary/ingress-canary-269n4" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.260330 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfmhp\" (UniqueName: \"kubernetes.io/projected/4135e4b9-a4ab-4961-8dfa-7c0e83fa1970-kube-api-access-gfmhp\") pod \"service-ca-9c57cc56f-nr46b\" (UID: \"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970\") " pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.278473 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wwb8\" (UniqueName: \"kubernetes.io/projected/aea099c8-3890-4ce5-9965-10f136542d87-kube-api-access-4wwb8\") pod \"cluster-image-registry-operator-dc59b4c8b-wkzmt\" (UID: \"aea099c8-3890-4ce5-9965-10f136542d87\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.307331 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmm8h\" (UniqueName: \"kubernetes.io/projected/096d8145-ede4-4af3-a326-7a1739ca1dc4-kube-api-access-rmm8h\") pod \"router-default-5444994796-tkpq4\" (UID: \"096d8145-ede4-4af3-a326-7a1739ca1dc4\") " pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.350988 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.362616 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.367110 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.370239 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.376679 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/153dc8a6-429a-40f9-abb1-3c3a8388c018-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cx6sh\" (UID: \"153dc8a6-429a-40f9-abb1-3c3a8388c018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.376720 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-certificates\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.376737 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/153dc8a6-429a-40f9-abb1-3c3a8388c018-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cx6sh\" (UID: \"153dc8a6-429a-40f9-abb1-3c3a8388c018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.376754 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-tls\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.376786 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-trusted-ca\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.376801 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8a4fad5a-444e-4161-b3df-a473b62dca2d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.376915 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8a4fad5a-444e-4161-b3df-a473b62dca2d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.376933 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/153dc8a6-429a-40f9-abb1-3c3a8388c018-config\") pod \"kube-apiserver-operator-766d6c64bb-cx6sh\" (UID: \"153dc8a6-429a-40f9-abb1-3c3a8388c018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.377003 4685 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.377018 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-bound-sa-token\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.377034 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks7mv\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-kube-api-access-ks7mv\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: E1202 10:02:24.401645 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:24.901623253 +0000 UTC m=+37.273397427 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.403438 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" event={"ID":"09cfde35-3539-4f7e-8985-9e0cd922f6e8","Type":"ContainerStarted","Data":"e1cadd51e57f9889dae27178ce0f74c7e2b7da04b9ceacb10736abdeb714ac68"} Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.415138 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.417232 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-mpwz8" event={"ID":"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23","Type":"ContainerStarted","Data":"4da0d7c15ff12716fe3c986f8ac33a581bd0a40376d7508a78065e2ea5bbc825"} Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.422561 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.437853 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.476873 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.477554 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.477778 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb33fcfc-73bb-4485-928f-0986d4734b50-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-74mhp\" (UID: \"cb33fcfc-73bb-4485-928f-0986d4734b50\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.477799 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1b66142-28fd-4d6c-b21e-7cc55e1d1672-config\") pod \"kube-controller-manager-operator-78b949d7b-74vzt\" (UID: \"d1b66142-28fd-4d6c-b21e-7cc55e1d1672\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.477839 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/20083215-8e96-41e3-b76f-68dd87550bc6-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.477902 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/79c39ecf-eebd-46cd-8b34-768197b3e2ac-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-n94sx\" (UID: \"79c39ecf-eebd-46cd-8b34-768197b3e2ac\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.477924 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7def2210-de93-4381-84dd-fe0d507f76b3-secret-volume\") pod \"collect-profiles-29411160-sjs76\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.477952 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fnnq\" (UniqueName: \"kubernetes.io/projected/cea96603-d7a1-4b75-9c91-612fd0744351-kube-api-access-2fnnq\") pod \"service-ca-operator-777779d784-cm7qg\" (UID: \"cea96603-d7a1-4b75-9c91-612fd0744351\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.477988 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8d3c08e9-d369-4369-b3f1-a36b85a49b39-proxy-tls\") pod \"machine-config-controller-84d6567774-rtwwd\" (UID: 
\"8d3c08e9-d369-4369-b3f1-a36b85a49b39\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.478022 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-etcd-ca\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.478038 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-csi-data-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.478075 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vchr4\" (UniqueName: \"kubernetes.io/projected/c69d86c9-b765-4489-ab56-d93126192812-kube-api-access-vchr4\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.478128 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kr64\" (UniqueName: \"kubernetes.io/projected/aa1aa5a6-03c6-4256-93e4-6000b78f4481-kube-api-access-2kr64\") pod \"dns-default-vwpld\" (UID: \"aa1aa5a6-03c6-4256-93e4-6000b78f4481\") " pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.478554 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" Dec 02 10:02:24 crc kubenswrapper[4685]: E1202 10:02:24.478864 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:24.978847655 +0000 UTC m=+37.350621809 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.478934 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/79128f87-7854-4a71-82cb-6c6f7cd51e8c-srv-cert\") pod \"olm-operator-6b444d44fb-hz9nf\" (UID: \"79128f87-7854-4a71-82cb-6c6f7cd51e8c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.478980 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-tls\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.479023 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89lpj\" (UniqueName: \"kubernetes.io/projected/4cea16e1-b06c-49e3-bc27-791409603cf2-kube-api-access-89lpj\") pod \"multus-admission-controller-857f4d67dd-kj84m\" (UID: \"4cea16e1-b06c-49e3-bc27-791409603cf2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.479885 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhshf\" (UniqueName: \"kubernetes.io/projected/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-kube-api-access-fhshf\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.479908 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d3c08e9-d369-4369-b3f1-a36b85a49b39-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rtwwd\" (UID: \"8d3c08e9-d369-4369-b3f1-a36b85a49b39\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.479927 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-registration-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.480132 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8a4fad5a-444e-4161-b3df-a473b62dca2d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 
10:02:24.480149 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/aa1aa5a6-03c6-4256-93e4-6000b78f4481-metrics-tls\") pod \"dns-default-vwpld\" (UID: \"aa1aa5a6-03c6-4256-93e4-6000b78f4481\") " pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.482179 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dp9v\" (UniqueName: \"kubernetes.io/projected/79128f87-7854-4a71-82cb-6c6f7cd51e8c-kube-api-access-7dp9v\") pod \"olm-operator-6b444d44fb-hz9nf\" (UID: \"79128f87-7854-4a71-82cb-6c6f7cd51e8c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.482218 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb33fcfc-73bb-4485-928f-0986d4734b50-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-74mhp\" (UID: \"cb33fcfc-73bb-4485-928f-0986d4734b50\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.482268 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98vbf\" (UniqueName: \"kubernetes.io/projected/e7b00c8d-9311-4164-95fd-15c6564d7cf0-kube-api-access-98vbf\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483437 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1b66142-28fd-4d6c-b21e-7cc55e1d1672-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-74vzt\" (UID: \"d1b66142-28fd-4d6c-b21e-7cc55e1d1672\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483466 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cea96603-d7a1-4b75-9c91-612fd0744351-config\") pod \"service-ca-operator-777779d784-cm7qg\" (UID: \"cea96603-d7a1-4b75-9c91-612fd0744351\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483487 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/20083215-8e96-41e3-b76f-68dd87550bc6-ready\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483736 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7def2210-de93-4381-84dd-fe0d507f76b3-config-volume\") pod \"collect-profiles-29411160-sjs76\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483757 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7b00c8d-9311-4164-95fd-15c6564d7cf0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483777 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-bound-sa-token\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483791 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-socket-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483807 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrqt4\" (UniqueName: \"kubernetes.io/projected/7def2210-de93-4381-84dd-fe0d507f76b3-kube-api-access-rrqt4\") pod \"collect-profiles-29411160-sjs76\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483823 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb33fcfc-73bb-4485-928f-0986d4734b50-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-74mhp\" (UID: \"cb33fcfc-73bb-4485-928f-0986d4734b50\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483856 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-serving-cert\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483905 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/70ecf800-4f19-4a60-834f-d72f0a1d2d37-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rpwnx\" (UID: \"70ecf800-4f19-4a60-834f-d72f0a1d2d37\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483921 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqgb2\" (UniqueName: \"kubernetes.io/projected/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-kube-api-access-dqgb2\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 
10:02:24.483936 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npb5l\" (UniqueName: \"kubernetes.io/projected/8d3c08e9-d369-4369-b3f1-a36b85a49b39-kube-api-access-npb5l\") pod \"machine-config-controller-84d6567774-rtwwd\" (UID: \"8d3c08e9-d369-4369-b3f1-a36b85a49b39\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483984 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-etcd-service-ca\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.483999 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-mountpoint-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484024 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ab49ef86-a462-4609-aaf7-30d74883b8a9-profile-collector-cert\") pod \"catalog-operator-68c6474976-drdhq\" (UID: \"ab49ef86-a462-4609-aaf7-30d74883b8a9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484062 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/153dc8a6-429a-40f9-abb1-3c3a8388c018-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cx6sh\" (UID: \"153dc8a6-429a-40f9-abb1-3c3a8388c018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484078 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e7b00c8d-9311-4164-95fd-15c6564d7cf0-images\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484098 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4cea16e1-b06c-49e3-bc27-791409603cf2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kj84m\" (UID: \"4cea16e1-b06c-49e3-bc27-791409603cf2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484114 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgkkb\" (UniqueName: \"kubernetes.io/projected/aa6c0d43-3b58-41f4-bf4b-d9765bde5c98-kube-api-access-wgkkb\") pod \"migrator-59844c95c7-cm2tb\" (UID: \"aa6c0d43-3b58-41f4-bf4b-d9765bde5c98\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484146 
4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-plugins-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484161 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-webhook-cert\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484175 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-config\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484192 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3445549e-e348-450c-982f-1456d6d029d0-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-hwkfb\" (UID: \"3445549e-e348-450c-982f-1456d6d029d0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484209 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xf986\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484226 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-certificates\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484242 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/153dc8a6-429a-40f9-abb1-3c3a8388c018-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cx6sh\" (UID: \"153dc8a6-429a-40f9-abb1-3c3a8388c018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484258 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xf986\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484273 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e7b00c8d-9311-4164-95fd-15c6564d7cf0-proxy-tls\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484289 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnjrr\" (UniqueName: \"kubernetes.io/projected/70ecf800-4f19-4a60-834f-d72f0a1d2d37-kube-api-access-lnjrr\") pod \"control-plane-machine-set-operator-78cbb6b69f-rpwnx\" (UID: \"70ecf800-4f19-4a60-834f-d72f0a1d2d37\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.484326 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-etcd-client\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.490907 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-tmpfs\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.491023 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/79128f87-7854-4a71-82cb-6c6f7cd51e8c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hz9nf\" (UID: \"79128f87-7854-4a71-82cb-6c6f7cd51e8c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.491066 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-trusted-ca\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.491082 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-apiservice-cert\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.491150 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ab49ef86-a462-4609-aaf7-30d74883b8a9-srv-cert\") pod \"catalog-operator-68c6474976-drdhq\" (UID: \"ab49ef86-a462-4609-aaf7-30d74883b8a9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492496 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" 
(UniqueName: \"kubernetes.io/empty-dir/8a4fad5a-444e-4161-b3df-a473b62dca2d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492520 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cea96603-d7a1-4b75-9c91-612fd0744351-serving-cert\") pod \"service-ca-operator-777779d784-cm7qg\" (UID: \"cea96603-d7a1-4b75-9c91-612fd0744351\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492536 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knrm5\" (UniqueName: \"kubernetes.io/projected/ab49ef86-a462-4609-aaf7-30d74883b8a9-kube-api-access-knrm5\") pod \"catalog-operator-68c6474976-drdhq\" (UID: \"ab49ef86-a462-4609-aaf7-30d74883b8a9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492603 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/153dc8a6-429a-40f9-abb1-3c3a8388c018-config\") pod \"kube-apiserver-operator-766d6c64bb-cx6sh\" (UID: \"153dc8a6-429a-40f9-abb1-3c3a8388c018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492623 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/20083215-8e96-41e3-b76f-68dd87550bc6-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492684 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d1b66142-28fd-4d6c-b21e-7cc55e1d1672-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-74vzt\" (UID: \"d1b66142-28fd-4d6c-b21e-7cc55e1d1672\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492749 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492769 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wss6q\" (UniqueName: \"kubernetes.io/projected/79c39ecf-eebd-46cd-8b34-768197b3e2ac-kube-api-access-wss6q\") pod \"package-server-manager-789f6589d5-n94sx\" (UID: \"79c39ecf-eebd-46cd-8b34-768197b3e2ac\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492787 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-z44ms\" (UniqueName: \"kubernetes.io/projected/370d38c8-640e-44bd-a095-75fb04be6320-kube-api-access-z44ms\") pod \"marketplace-operator-79b997595-xf986\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492856 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks7mv\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-kube-api-access-ks7mv\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492901 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa1aa5a6-03c6-4256-93e4-6000b78f4481-config-volume\") pod \"dns-default-vwpld\" (UID: \"aa1aa5a6-03c6-4256-93e4-6000b78f4481\") " pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492919 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvmv2\" (UniqueName: \"kubernetes.io/projected/20083215-8e96-41e3-b76f-68dd87550bc6-kube-api-access-mvmv2\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492935 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfqwg\" (UniqueName: \"kubernetes.io/projected/3445549e-e348-450c-982f-1456d6d029d0-kube-api-access-hfqwg\") pod \"kube-storage-version-migrator-operator-b67b599dd-hwkfb\" (UID: \"3445549e-e348-450c-982f-1456d6d029d0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.492992 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3445549e-e348-450c-982f-1456d6d029d0-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-hwkfb\" (UID: \"3445549e-e348-450c-982f-1456d6d029d0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.498925 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-269n4" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.491518 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.501007 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/153dc8a6-429a-40f9-abb1-3c3a8388c018-config\") pod \"kube-apiserver-operator-766d6c64bb-cx6sh\" (UID: \"153dc8a6-429a-40f9-abb1-3c3a8388c018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.491222 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-tls\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: E1202 10:02:24.501341 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.001327087 +0000 UTC m=+37.373101301 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.501413 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8a4fad5a-444e-4161-b3df-a473b62dca2d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.504100 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-certificates\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.507715 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8a4fad5a-444e-4161-b3df-a473b62dca2d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.507750 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-trusted-ca\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.532539 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-bound-sa-token\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.558678 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/153dc8a6-429a-40f9-abb1-3c3a8388c018-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-cx6sh\" (UID: \"153dc8a6-429a-40f9-abb1-3c3a8388c018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.589255 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks7mv\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-kube-api-access-ks7mv\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.595724 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.595909 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/20083215-8e96-41e3-b76f-68dd87550bc6-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.595939 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d1b66142-28fd-4d6c-b21e-7cc55e1d1672-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-74vzt\" (UID: \"d1b66142-28fd-4d6c-b21e-7cc55e1d1672\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.595990 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wss6q\" (UniqueName: \"kubernetes.io/projected/79c39ecf-eebd-46cd-8b34-768197b3e2ac-kube-api-access-wss6q\") pod \"package-server-manager-789f6589d5-n94sx\" (UID: \"79c39ecf-eebd-46cd-8b34-768197b3e2ac\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596014 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z44ms\" (UniqueName: \"kubernetes.io/projected/370d38c8-640e-44bd-a095-75fb04be6320-kube-api-access-z44ms\") pod \"marketplace-operator-79b997595-xf986\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596037 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa1aa5a6-03c6-4256-93e4-6000b78f4481-config-volume\") pod \"dns-default-vwpld\" (UID: \"aa1aa5a6-03c6-4256-93e4-6000b78f4481\") " 
pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596062 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvmv2\" (UniqueName: \"kubernetes.io/projected/20083215-8e96-41e3-b76f-68dd87550bc6-kube-api-access-mvmv2\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596083 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfqwg\" (UniqueName: \"kubernetes.io/projected/3445549e-e348-450c-982f-1456d6d029d0-kube-api-access-hfqwg\") pod \"kube-storage-version-migrator-operator-b67b599dd-hwkfb\" (UID: \"3445549e-e348-450c-982f-1456d6d029d0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596115 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3445549e-e348-450c-982f-1456d6d029d0-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-hwkfb\" (UID: \"3445549e-e348-450c-982f-1456d6d029d0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596141 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1b66142-28fd-4d6c-b21e-7cc55e1d1672-config\") pod \"kube-controller-manager-operator-78b949d7b-74vzt\" (UID: \"d1b66142-28fd-4d6c-b21e-7cc55e1d1672\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596168 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/20083215-8e96-41e3-b76f-68dd87550bc6-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596189 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb33fcfc-73bb-4485-928f-0986d4734b50-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-74mhp\" (UID: \"cb33fcfc-73bb-4485-928f-0986d4734b50\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596213 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/79c39ecf-eebd-46cd-8b34-768197b3e2ac-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-n94sx\" (UID: \"79c39ecf-eebd-46cd-8b34-768197b3e2ac\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596236 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7def2210-de93-4381-84dd-fe0d507f76b3-secret-volume\") pod \"collect-profiles-29411160-sjs76\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596269 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fnnq\" (UniqueName: \"kubernetes.io/projected/cea96603-d7a1-4b75-9c91-612fd0744351-kube-api-access-2fnnq\") pod \"service-ca-operator-777779d784-cm7qg\" (UID: \"cea96603-d7a1-4b75-9c91-612fd0744351\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596305 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8d3c08e9-d369-4369-b3f1-a36b85a49b39-proxy-tls\") pod \"machine-config-controller-84d6567774-rtwwd\" (UID: \"8d3c08e9-d369-4369-b3f1-a36b85a49b39\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596330 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-etcd-ca\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596345 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-csi-data-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596359 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vchr4\" (UniqueName: \"kubernetes.io/projected/c69d86c9-b765-4489-ab56-d93126192812-kube-api-access-vchr4\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596385 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kr64\" (UniqueName: \"kubernetes.io/projected/aa1aa5a6-03c6-4256-93e4-6000b78f4481-kube-api-access-2kr64\") pod \"dns-default-vwpld\" (UID: \"aa1aa5a6-03c6-4256-93e4-6000b78f4481\") " pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596406 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/79128f87-7854-4a71-82cb-6c6f7cd51e8c-srv-cert\") pod \"olm-operator-6b444d44fb-hz9nf\" (UID: \"79128f87-7854-4a71-82cb-6c6f7cd51e8c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596431 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89lpj\" (UniqueName: \"kubernetes.io/projected/4cea16e1-b06c-49e3-bc27-791409603cf2-kube-api-access-89lpj\") pod \"multus-admission-controller-857f4d67dd-kj84m\" (UID: \"4cea16e1-b06c-49e3-bc27-791409603cf2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596459 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhshf\" (UniqueName: 
\"kubernetes.io/projected/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-kube-api-access-fhshf\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596475 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d3c08e9-d369-4369-b3f1-a36b85a49b39-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rtwwd\" (UID: \"8d3c08e9-d369-4369-b3f1-a36b85a49b39\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596490 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-registration-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596508 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/aa1aa5a6-03c6-4256-93e4-6000b78f4481-metrics-tls\") pod \"dns-default-vwpld\" (UID: \"aa1aa5a6-03c6-4256-93e4-6000b78f4481\") " pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596524 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dp9v\" (UniqueName: \"kubernetes.io/projected/79128f87-7854-4a71-82cb-6c6f7cd51e8c-kube-api-access-7dp9v\") pod \"olm-operator-6b444d44fb-hz9nf\" (UID: \"79128f87-7854-4a71-82cb-6c6f7cd51e8c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596540 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb33fcfc-73bb-4485-928f-0986d4734b50-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-74mhp\" (UID: \"cb33fcfc-73bb-4485-928f-0986d4734b50\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596602 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98vbf\" (UniqueName: \"kubernetes.io/projected/e7b00c8d-9311-4164-95fd-15c6564d7cf0-kube-api-access-98vbf\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596621 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1b66142-28fd-4d6c-b21e-7cc55e1d1672-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-74vzt\" (UID: \"d1b66142-28fd-4d6c-b21e-7cc55e1d1672\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596638 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cea96603-d7a1-4b75-9c91-612fd0744351-config\") pod \"service-ca-operator-777779d784-cm7qg\" (UID: 
\"cea96603-d7a1-4b75-9c91-612fd0744351\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596656 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/20083215-8e96-41e3-b76f-68dd87550bc6-ready\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596676 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7def2210-de93-4381-84dd-fe0d507f76b3-config-volume\") pod \"collect-profiles-29411160-sjs76\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596693 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7b00c8d-9311-4164-95fd-15c6564d7cf0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596707 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-socket-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596723 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrqt4\" (UniqueName: \"kubernetes.io/projected/7def2210-de93-4381-84dd-fe0d507f76b3-kube-api-access-rrqt4\") pod \"collect-profiles-29411160-sjs76\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596739 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb33fcfc-73bb-4485-928f-0986d4734b50-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-74mhp\" (UID: \"cb33fcfc-73bb-4485-928f-0986d4734b50\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596760 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-serving-cert\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596777 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/70ecf800-4f19-4a60-834f-d72f0a1d2d37-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rpwnx\" (UID: \"70ecf800-4f19-4a60-834f-d72f0a1d2d37\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" Dec 02 10:02:24 crc kubenswrapper[4685]: 
I1202 10:02:24.596795 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqgb2\" (UniqueName: \"kubernetes.io/projected/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-kube-api-access-dqgb2\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596812 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npb5l\" (UniqueName: \"kubernetes.io/projected/8d3c08e9-d369-4369-b3f1-a36b85a49b39-kube-api-access-npb5l\") pod \"machine-config-controller-84d6567774-rtwwd\" (UID: \"8d3c08e9-d369-4369-b3f1-a36b85a49b39\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596828 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-etcd-service-ca\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596841 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-mountpoint-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596858 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ab49ef86-a462-4609-aaf7-30d74883b8a9-profile-collector-cert\") pod \"catalog-operator-68c6474976-drdhq\" (UID: \"ab49ef86-a462-4609-aaf7-30d74883b8a9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596875 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e7b00c8d-9311-4164-95fd-15c6564d7cf0-images\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596890 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4cea16e1-b06c-49e3-bc27-791409603cf2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kj84m\" (UID: \"4cea16e1-b06c-49e3-bc27-791409603cf2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596911 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgkkb\" (UniqueName: \"kubernetes.io/projected/aa6c0d43-3b58-41f4-bf4b-d9765bde5c98-kube-api-access-wgkkb\") pod \"migrator-59844c95c7-cm2tb\" (UID: \"aa6c0d43-3b58-41f4-bf4b-d9765bde5c98\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596925 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-plugins-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596939 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-webhook-cert\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596954 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-config\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596969 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3445549e-e348-450c-982f-1456d6d029d0-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-hwkfb\" (UID: \"3445549e-e348-450c-982f-1456d6d029d0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.596986 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xf986\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597010 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xf986\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597031 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e7b00c8d-9311-4164-95fd-15c6564d7cf0-proxy-tls\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597054 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnjrr\" (UniqueName: \"kubernetes.io/projected/70ecf800-4f19-4a60-834f-d72f0a1d2d37-kube-api-access-lnjrr\") pod \"control-plane-machine-set-operator-78cbb6b69f-rpwnx\" (UID: \"70ecf800-4f19-4a60-834f-d72f0a1d2d37\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597073 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-etcd-client\") pod \"etcd-operator-b45778765-9rnjd\" (UID: 
\"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597094 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-tmpfs\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597111 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/79128f87-7854-4a71-82cb-6c6f7cd51e8c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hz9nf\" (UID: \"79128f87-7854-4a71-82cb-6c6f7cd51e8c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597127 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-apiservice-cert\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597147 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ab49ef86-a462-4609-aaf7-30d74883b8a9-srv-cert\") pod \"catalog-operator-68c6474976-drdhq\" (UID: \"ab49ef86-a462-4609-aaf7-30d74883b8a9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597173 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cea96603-d7a1-4b75-9c91-612fd0744351-serving-cert\") pod \"service-ca-operator-777779d784-cm7qg\" (UID: \"cea96603-d7a1-4b75-9c91-612fd0744351\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597172 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-registration-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597189 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knrm5\" (UniqueName: \"kubernetes.io/projected/ab49ef86-a462-4609-aaf7-30d74883b8a9-kube-api-access-knrm5\") pod \"catalog-operator-68c6474976-drdhq\" (UID: \"ab49ef86-a462-4609-aaf7-30d74883b8a9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597297 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-csi-data-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.597903 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/aa1aa5a6-03c6-4256-93e4-6000b78f4481-config-volume\") pod \"dns-default-vwpld\" (UID: \"aa1aa5a6-03c6-4256-93e4-6000b78f4481\") " pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:24 crc kubenswrapper[4685]: E1202 10:02:24.597995 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.097975377 +0000 UTC m=+37.469749561 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.598047 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/20083215-8e96-41e3-b76f-68dd87550bc6-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.598821 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3445549e-e348-450c-982f-1456d6d029d0-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-hwkfb\" (UID: \"3445549e-e348-450c-982f-1456d6d029d0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.599471 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8d3c08e9-d369-4369-b3f1-a36b85a49b39-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rtwwd\" (UID: \"8d3c08e9-d369-4369-b3f1-a36b85a49b39\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.599547 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-plugins-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.599852 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xf986\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.601232 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e7b00c8d-9311-4164-95fd-15c6564d7cf0-images\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.606357 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-config\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.606391 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cb33fcfc-73bb-4485-928f-0986d4734b50-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-74mhp\" (UID: \"cb33fcfc-73bb-4485-928f-0986d4734b50\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.607039 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/20083215-8e96-41e3-b76f-68dd87550bc6-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.609003 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4cea16e1-b06c-49e3-bc27-791409603cf2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kj84m\" (UID: \"4cea16e1-b06c-49e3-bc27-791409603cf2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.610580 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/aa1aa5a6-03c6-4256-93e4-6000b78f4481-metrics-tls\") pod \"dns-default-vwpld\" (UID: \"aa1aa5a6-03c6-4256-93e4-6000b78f4481\") " pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.612351 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7def2210-de93-4381-84dd-fe0d507f76b3-secret-volume\") pod \"collect-profiles-29411160-sjs76\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.617724 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cea96603-d7a1-4b75-9c91-612fd0744351-config\") pod \"service-ca-operator-777779d784-cm7qg\" (UID: \"cea96603-d7a1-4b75-9c91-612fd0744351\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.617931 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-webhook-cert\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.617944 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/20083215-8e96-41e3-b76f-68dd87550bc6-ready\") pod 
\"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.618660 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7b00c8d-9311-4164-95fd-15c6564d7cf0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.618774 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-socket-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.619121 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-etcd-ca\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.624086 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb33fcfc-73bb-4485-928f-0986d4734b50-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-74mhp\" (UID: \"cb33fcfc-73bb-4485-928f-0986d4734b50\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.625613 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1b66142-28fd-4d6c-b21e-7cc55e1d1672-config\") pod \"kube-controller-manager-operator-78b949d7b-74vzt\" (UID: \"d1b66142-28fd-4d6c-b21e-7cc55e1d1672\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.627475 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7def2210-de93-4381-84dd-fe0d507f76b3-config-volume\") pod \"collect-profiles-29411160-sjs76\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.627866 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-tmpfs\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.628276 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/ab49ef86-a462-4609-aaf7-30d74883b8a9-profile-collector-cert\") pod \"catalog-operator-68c6474976-drdhq\" (UID: \"ab49ef86-a462-4609-aaf7-30d74883b8a9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.628760 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/79c39ecf-eebd-46cd-8b34-768197b3e2ac-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-n94sx\" (UID: \"79c39ecf-eebd-46cd-8b34-768197b3e2ac\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.629299 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c69d86c9-b765-4489-ab56-d93126192812-mountpoint-dir\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.631197 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-etcd-service-ca\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.634801 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/79128f87-7854-4a71-82cb-6c6f7cd51e8c-srv-cert\") pod \"olm-operator-6b444d44fb-hz9nf\" (UID: \"79128f87-7854-4a71-82cb-6c6f7cd51e8c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.637497 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-apiservice-cert\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.643523 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-serving-cert\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.644160 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/ab49ef86-a462-4609-aaf7-30d74883b8a9-srv-cert\") pod \"catalog-operator-68c6474976-drdhq\" (UID: \"ab49ef86-a462-4609-aaf7-30d74883b8a9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.649806 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xf986\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.650134 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3445549e-e348-450c-982f-1456d6d029d0-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-hwkfb\" (UID: \"3445549e-e348-450c-982f-1456d6d029d0\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.653072 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cea96603-d7a1-4b75-9c91-612fd0744351-serving-cert\") pod \"service-ca-operator-777779d784-cm7qg\" (UID: \"cea96603-d7a1-4b75-9c91-612fd0744351\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.654903 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/79128f87-7854-4a71-82cb-6c6f7cd51e8c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hz9nf\" (UID: \"79128f87-7854-4a71-82cb-6c6f7cd51e8c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.655692 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knrm5\" (UniqueName: \"kubernetes.io/projected/ab49ef86-a462-4609-aaf7-30d74883b8a9-kube-api-access-knrm5\") pod \"catalog-operator-68c6474976-drdhq\" (UID: \"ab49ef86-a462-4609-aaf7-30d74883b8a9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.659589 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e7b00c8d-9311-4164-95fd-15c6564d7cf0-proxy-tls\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.660089 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1b66142-28fd-4d6c-b21e-7cc55e1d1672-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-74vzt\" (UID: \"d1b66142-28fd-4d6c-b21e-7cc55e1d1672\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.667306 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vchr4\" (UniqueName: \"kubernetes.io/projected/c69d86c9-b765-4489-ab56-d93126192812-kube-api-access-vchr4\") pod \"csi-hostpathplugin-28vxg\" (UID: \"c69d86c9-b765-4489-ab56-d93126192812\") " pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.678375 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-etcd-client\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.682626 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/153dc8a6-429a-40f9-abb1-3c3a8388c018-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-cx6sh\" (UID: \"153dc8a6-429a-40f9-abb1-3c3a8388c018\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.688329 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8d3c08e9-d369-4369-b3f1-a36b85a49b39-proxy-tls\") pod \"machine-config-controller-84d6567774-rtwwd\" (UID: \"8d3c08e9-d369-4369-b3f1-a36b85a49b39\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.688845 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kr64\" (UniqueName: \"kubernetes.io/projected/aa1aa5a6-03c6-4256-93e4-6000b78f4481-kube-api-access-2kr64\") pod \"dns-default-vwpld\" (UID: \"aa1aa5a6-03c6-4256-93e4-6000b78f4481\") " pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.698625 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: E1202 10:02:24.698960 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.198949385 +0000 UTC m=+37.570723539 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.700054 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89lpj\" (UniqueName: \"kubernetes.io/projected/4cea16e1-b06c-49e3-bc27-791409603cf2-kube-api-access-89lpj\") pod \"multus-admission-controller-857f4d67dd-kj84m\" (UID: \"4cea16e1-b06c-49e3-bc27-791409603cf2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.703544 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-m7vnk"] Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.705174 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh"] Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.706345 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-hgnrz"] Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.706555 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-28vxg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.714445 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-mfk9b"] Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.714707 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/70ecf800-4f19-4a60-834f-d72f0a1d2d37-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rpwnx\" (UID: \"70ecf800-4f19-4a60-834f-d72f0a1d2d37\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.716609 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.723246 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vbfhw"] Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.726563 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt"] Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.726629 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-ksvk4"] Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.737783 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d1b66142-28fd-4d6c-b21e-7cc55e1d1672-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-74vzt\" (UID: \"d1b66142-28fd-4d6c-b21e-7cc55e1d1672\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.784036 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhshf\" (UniqueName: \"kubernetes.io/projected/71660d3a-ef68-4dc4-9d4f-bdae6372f2ec-kube-api-access-fhshf\") pod \"etcd-operator-b45778765-9rnjd\" (UID: \"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.806845 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.807273 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:24 crc kubenswrapper[4685]: E1202 10:02:24.807387 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.307365316 +0000 UTC m=+37.679139470 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.807688 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.808017 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wss6q\" (UniqueName: \"kubernetes.io/projected/79c39ecf-eebd-46cd-8b34-768197b3e2ac-kube-api-access-wss6q\") pod \"package-server-manager-789f6589d5-n94sx\" (UID: \"79c39ecf-eebd-46cd-8b34-768197b3e2ac\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" Dec 02 10:02:24 crc kubenswrapper[4685]: E1202 10:02:24.808599 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.308564409 +0000 UTC m=+37.680338613 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.809805 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z44ms\" (UniqueName: \"kubernetes.io/projected/370d38c8-640e-44bd-a095-75fb04be6320-kube-api-access-z44ms\") pod \"marketplace-operator-79b997595-xf986\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.816912 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.827980 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.835604 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvmv2\" (UniqueName: \"kubernetes.io/projected/20083215-8e96-41e3-b76f-68dd87550bc6-kube-api-access-mvmv2\") pod \"cni-sysctl-allowlist-ds-96htj\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.838144 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.845109 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fnnq\" (UniqueName: \"kubernetes.io/projected/cea96603-d7a1-4b75-9c91-612fd0744351-kube-api-access-2fnnq\") pod \"service-ca-operator-777779d784-cm7qg\" (UID: \"cea96603-d7a1-4b75-9c91-612fd0744351\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.857823 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfqwg\" (UniqueName: \"kubernetes.io/projected/3445549e-e348-450c-982f-1456d6d029d0-kube-api-access-hfqwg\") pod \"kube-storage-version-migrator-operator-b67b599dd-hwkfb\" (UID: \"3445549e-e348-450c-982f-1456d6d029d0\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.861024 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.871623 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrqt4\" (UniqueName: \"kubernetes.io/projected/7def2210-de93-4381-84dd-fe0d507f76b3-kube-api-access-rrqt4\") pod \"collect-profiles-29411160-sjs76\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.887102 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgkkb\" (UniqueName: \"kubernetes.io/projected/aa6c0d43-3b58-41f4-bf4b-d9765bde5c98-kube-api-access-wgkkb\") pod \"migrator-59844c95c7-cm2tb\" (UID: \"aa6c0d43-3b58-41f4-bf4b-d9765bde5c98\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.896103 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.899647 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dp9v\" (UniqueName: \"kubernetes.io/projected/79128f87-7854-4a71-82cb-6c6f7cd51e8c-kube-api-access-7dp9v\") pod \"olm-operator-6b444d44fb-hz9nf\" (UID: \"79128f87-7854-4a71-82cb-6c6f7cd51e8c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.908721 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:24 crc kubenswrapper[4685]: E1202 10:02:24.909077 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.409063603 +0000 UTC m=+37.780837757 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.924934 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t5nfh"] Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.941539 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqgb2\" (UniqueName: \"kubernetes.io/projected/ae0fbaf9-ce34-4978-8d6b-6f40e1597c91-kube-api-access-dqgb2\") pod \"packageserver-d55dfcdfc-mkjcc\" (UID: \"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.941757 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.946282 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cb33fcfc-73bb-4485-928f-0986d4734b50-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-74mhp\" (UID: \"cb33fcfc-73bb-4485-928f-0986d4734b50\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.946839 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4"] Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.950943 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.961603 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.973212 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.982637 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.988622 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98vbf\" (UniqueName: \"kubernetes.io/projected/e7b00c8d-9311-4164-95fd-15c6564d7cf0-kube-api-access-98vbf\") pod \"machine-config-operator-74547568cd-sxwtf\" (UID: \"e7b00c8d-9311-4164-95fd-15c6564d7cf0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:24 crc kubenswrapper[4685]: I1202 10:02:24.994923 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.009104 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnjrr\" (UniqueName: \"kubernetes.io/projected/70ecf800-4f19-4a60-834f-d72f0a1d2d37-kube-api-access-lnjrr\") pod \"control-plane-machine-set-operator-78cbb6b69f-rpwnx\" (UID: \"70ecf800-4f19-4a60-834f-d72f0a1d2d37\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.010209 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.010565 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.510543846 +0000 UTC m=+37.882318000 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.027794 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npb5l\" (UniqueName: \"kubernetes.io/projected/8d3c08e9-d369-4369-b3f1-a36b85a49b39-kube-api-access-npb5l\") pod \"machine-config-controller-84d6567774-rtwwd\" (UID: \"8d3c08e9-d369-4369-b3f1-a36b85a49b39\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.056178 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-5glrx"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.077740 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-jmz9s"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.081089 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.111065 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.111766 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-02 10:02:25.61173965 +0000 UTC m=+37.983513804 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.116394 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.116815 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.616799728 +0000 UTC m=+37.988573882 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.132262 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-t4sm7"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.150286 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.150897 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.169993 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.178772 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.187855 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.218066 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.218178 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.718159406 +0000 UTC m=+38.089933560 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.218468 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.218852 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.718841685 +0000 UTC m=+38.090615839 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.225162 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.234865 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.256254 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg"] Dec 02 10:02:25 crc kubenswrapper[4685]: W1202 10:02:25.316233 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a30ded0_7ce0_4ed4_9e40_9266f1472af4.slice/crio-2a7917741788e839889f1c9f3c5bf1f78951fae5a966e4d78c6f5df9662aa130 WatchSource:0}: Error finding container 2a7917741788e839889f1c9f3c5bf1f78951fae5a966e4d78c6f5df9662aa130: Status 404 returned error can't find the container with id 2a7917741788e839889f1c9f3c5bf1f78951fae5a966e4d78c6f5df9662aa130 Dec 02 10:02:25 crc kubenswrapper[4685]: W1202 10:02:25.317179 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2655a11a_813f_40db_9e51_33122afc460a.slice/crio-e44672c9d620613ebba0aa6071450cd8d7b3d276e9e7d749b0f9ffc471142bd8 WatchSource:0}: Error finding container e44672c9d620613ebba0aa6071450cd8d7b3d276e9e7d749b0f9ffc471142bd8: Status 404 returned error can't find the container with id e44672c9d620613ebba0aa6071450cd8d7b3d276e9e7d749b0f9ffc471142bd8 Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.320933 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.321260 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.821245622 +0000 UTC m=+38.193019776 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: W1202 10:02:25.322763 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-b6eb2ede31d7b3559c00fe962edc58640bbe04a3d8821d5bddc30b0faf7bb28b WatchSource:0}: Error finding container b6eb2ede31d7b3559c00fe962edc58640bbe04a3d8821d5bddc30b0faf7bb28b: Status 404 returned error can't find the container with id b6eb2ede31d7b3559c00fe962edc58640bbe04a3d8821d5bddc30b0faf7bb28b Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.329633 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-nr46b"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.377219 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.387477 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-269n4"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.424831 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.425978 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:25.925264023 +0000 UTC m=+38.297038177 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.436309 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4f0de1b145ca0f51f2d8692e4b35d9a2ab9278589f073a589101aed3ed569264"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.437942 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hgnrz" event={"ID":"4e159366-bd40-43f3-9b3b-c818913c957a","Type":"ContainerStarted","Data":"953b50d8291904ca3a1fc81351b9bab761fa5960458702dec67b77fc1a5f33b4"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.438847 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" event={"ID":"2dee0464-6eab-44a6-a33d-3b6096319ecf","Type":"ContainerStarted","Data":"d3edd0ab2ffb4014764a07e8379783e7dc03ace5960e6b2da3d329ab628b73be"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.440305 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-tkpq4" event={"ID":"096d8145-ede4-4af3-a326-7a1739ca1dc4","Type":"ContainerStarted","Data":"b88367b7a8fc46fa2296c8a21e8227adfff1c0655ef25d9458c003eeaa601a5d"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.441139 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" event={"ID":"8e3956cb-a0a9-4add-862c-b7facbcbf400","Type":"ContainerStarted","Data":"c4c1ece5edf60b8becf9d6737d074e2c1da283174717674d0838122eb3126d07"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.441818 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" event={"ID":"0a30ded0-7ce0-4ed4-9e40-9266f1472af4","Type":"ContainerStarted","Data":"2a7917741788e839889f1c9f3c5bf1f78951fae5a966e4d78c6f5df9662aa130"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.442538 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-mpwz8" event={"ID":"ae20ffd8-2f39-4fcd-a17e-0ae889c03d23","Type":"ContainerStarted","Data":"9c0705ba96700fbfa31a0269292e959ac32e03d28d7ae95f34c1e011ea715903"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.449065 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" event={"ID":"332dd167-8e86-4071-a277-547ab88bc15d","Type":"ContainerStarted","Data":"9370a078279a958aa70996fedd4075365e9153f6b963534a3bfb1b2e7d79f411"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.468124 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" event={"ID":"c26cde94-5fa8-4f83-ba18-eef7033f6ec1","Type":"ContainerStarted","Data":"b04e27f85af5da114bff5a75c8b42922e05cc9d0b1124955ee2c362bcc929240"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 
10:02:25.470954 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" event={"ID":"2655a11a-813f-40db-9e51-33122afc460a","Type":"ContainerStarted","Data":"e44672c9d620613ebba0aa6071450cd8d7b3d276e9e7d749b0f9ffc471142bd8"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.472016 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mfk9b" event={"ID":"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3","Type":"ContainerStarted","Data":"62c2a0cfb6d51bf75afd5e5880eb1bb412e7877d4e48cbd68584eccfaa008cff"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.472711 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.472900 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"25e2187735cedeaa4dd8728664037a3b240dcfc9a1241b366014a2180b71609c"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.474365 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" event={"ID":"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6","Type":"ContainerStarted","Data":"e2b50cac1971bf1bd0267716c7053566b6f4be2694ded1967c875f89a1c183ae"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.476215 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" event={"ID":"b1d639aa-af8d-40c6-bd5a-6543735cce4e","Type":"ContainerStarted","Data":"5c9e56c87c642e5c2f9d141fe87cc148b1be17e34e6902bacdc70fd43a7c0b8d"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.477504 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b6eb2ede31d7b3559c00fe962edc58640bbe04a3d8821d5bddc30b0faf7bb28b"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.478477 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" event={"ID":"387d4a50-7633-485d-b8d3-ef2afc8c3c97","Type":"ContainerStarted","Data":"5ed4a3dced196e61aeee9b314d7b3f420895ebeed3466af2e13ee3017cdfefca"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.479318 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-m7vnk" event={"ID":"60826f81-d0cb-4339-83b0-d40a5df1aff8","Type":"ContainerStarted","Data":"310ed885d3fc7ff3436610ded058b7b0ac3974116b9f0af735de4a8c997dd921"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.481043 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" event={"ID":"6651d927-5e58-43b0-8c92-f425f6145e31","Type":"ContainerStarted","Data":"c5efdaeefff12e7600eda33b899473fe648f09fdb98a338199b1d08080f28b60"} Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.513182 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.520072 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.525343 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.525692 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.025674866 +0000 UTC m=+38.397449020 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: W1202 10:02:25.556732 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod589484c8_ce97_4e9e_b17a_58af833d9915.slice/crio-ac39432fa95fb9d430b4aee2ba9b6c982712bc4846af7dce17fde45c067fb942 WatchSource:0}: Error finding container ac39432fa95fb9d430b4aee2ba9b6c982712bc4846af7dce17fde45c067fb942: Status 404 returned error can't find the container with id ac39432fa95fb9d430b4aee2ba9b6c982712bc4846af7dce17fde45c067fb942 Dec 02 10:02:25 crc kubenswrapper[4685]: W1202 10:02:25.579258 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6136cbbe_cd55_4ba7_829a_621906a4b7b7.slice/crio-b59f3588dc38cb872208012909504abab48bc977206bbed6e3c86b6a5c120fdd WatchSource:0}: Error finding container b59f3588dc38cb872208012909504abab48bc977206bbed6e3c86b6a5c120fdd: Status 404 returned error can't find the container with id b59f3588dc38cb872208012909504abab48bc977206bbed6e3c86b6a5c120fdd Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.583880 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:02:25 crc kubenswrapper[4685]: W1202 10:02:25.585643 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4135e4b9_a4ab_4961_8dfa_7c0e83fa1970.slice/crio-44299e5c8081080a1941e5d522098fb03714d9181fd84159cf6f3d5f3face873 WatchSource:0}: Error finding container 44299e5c8081080a1941e5d522098fb03714d9181fd84159cf6f3d5f3face873: Status 404 returned error can't find the container with id 44299e5c8081080a1941e5d522098fb03714d9181fd84159cf6f3d5f3face873 Dec 02 10:02:25 crc kubenswrapper[4685]: W1202 10:02:25.590648 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod153dc8a6_429a_40f9_abb1_3c3a8388c018.slice/crio-a0f341507bdf5fe6252b9176d13547100beb6391d1b8342e2e563ac26d410877 WatchSource:0}: Error finding container 
a0f341507bdf5fe6252b9176d13547100beb6391d1b8342e2e563ac26d410877: Status 404 returned error can't find the container with id a0f341507bdf5fe6252b9176d13547100beb6391d1b8342e2e563ac26d410877 Dec 02 10:02:25 crc kubenswrapper[4685]: W1202 10:02:25.596063 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b66142_28fd_4d6c_b21e_7cc55e1d1672.slice/crio-d06d0f6fd7422bc6486fa9d4099803bb4e7983814b567fe22fcbd23d74718ccc WatchSource:0}: Error finding container d06d0f6fd7422bc6486fa9d4099803bb4e7983814b567fe22fcbd23d74718ccc: Status 404 returned error can't find the container with id d06d0f6fd7422bc6486fa9d4099803bb4e7983814b567fe22fcbd23d74718ccc Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.633367 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.634083 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.133693475 +0000 UTC m=+38.505467629 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.735081 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.735342 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.235327671 +0000 UTC m=+38.607101825 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.813401 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg"] Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.836719 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.837089 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.337073441 +0000 UTC m=+38.708847595 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.937924 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.938044 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.438025929 +0000 UTC m=+38.809800083 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.938197 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:25 crc kubenswrapper[4685]: E1202 10:02:25.938459 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.43845247 +0000 UTC m=+38.810226624 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:25 crc kubenswrapper[4685]: I1202 10:02:25.952988 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf"] Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.040166 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:26 crc kubenswrapper[4685]: E1202 10:02:26.040491 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.540472646 +0000 UTC m=+38.912246800 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.107503 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9rnjd"] Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.127903 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-28vxg"] Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.142867 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:26 crc kubenswrapper[4685]: E1202 10:02:26.143224 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.643202812 +0000 UTC m=+39.014976966 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.217600 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx"] Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.233726 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx"] Dec 02 10:02:26 crc kubenswrapper[4685]: W1202 10:02:26.241459 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20083215_8e96_41e3_b76f_68dd87550bc6.slice/crio-00310bd472bbce356c1b91a61b7230340e278ac59df4decdc38411510260bd5c WatchSource:0}: Error finding container 00310bd472bbce356c1b91a61b7230340e278ac59df4decdc38411510260bd5c: Status 404 returned error can't find the container with id 00310bd472bbce356c1b91a61b7230340e278ac59df4decdc38411510260bd5c Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.244672 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:26 crc kubenswrapper[4685]: E1202 10:02:26.245000 4685 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.744986602 +0000 UTC m=+39.116760756 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:26 crc kubenswrapper[4685]: W1202 10:02:26.291646 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71660d3a_ef68_4dc4_9d4f_bdae6372f2ec.slice/crio-575a8fd400a03083bc0e3a9f82f32a717870f595add3716aa16c3e2efcdf1ddf WatchSource:0}: Error finding container 575a8fd400a03083bc0e3a9f82f32a717870f595add3716aa16c3e2efcdf1ddf: Status 404 returned error can't find the container with id 575a8fd400a03083bc0e3a9f82f32a717870f595add3716aa16c3e2efcdf1ddf Dec 02 10:02:26 crc kubenswrapper[4685]: W1202 10:02:26.305477 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod79c39ecf_eebd_46cd_8b34_768197b3e2ac.slice/crio-0d65c82f2e69ccdc1615732b79ee19d9e8d7bfbb3c914e2c2ebf218054a56131 WatchSource:0}: Error finding container 0d65c82f2e69ccdc1615732b79ee19d9e8d7bfbb3c914e2c2ebf218054a56131: Status 404 returned error can't find the container with id 0d65c82f2e69ccdc1615732b79ee19d9e8d7bfbb3c914e2c2ebf218054a56131 Dec 02 10:02:26 crc kubenswrapper[4685]: W1202 10:02:26.306873 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod79128f87_7854_4a71_82cb_6c6f7cd51e8c.slice/crio-91dbbc2e274fd0504e1236f8ff1d03ebb658a1d643dc118dc2089d5bcb82d1e4 WatchSource:0}: Error finding container 91dbbc2e274fd0504e1236f8ff1d03ebb658a1d643dc118dc2089d5bcb82d1e4: Status 404 returned error can't find the container with id 91dbbc2e274fd0504e1236f8ff1d03ebb658a1d643dc118dc2089d5bcb82d1e4 Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.345812 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb"] Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.346307 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:26 crc kubenswrapper[4685]: E1202 10:02:26.346544 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.846534227 +0000 UTC m=+39.218308371 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.454481 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:26 crc kubenswrapper[4685]: E1202 10:02:26.455029 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:26.955003488 +0000 UTC m=+39.326777672 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:26 crc kubenswrapper[4685]: W1202 10:02:26.484914 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70ecf800_4f19_4a60_834f_d72f0a1d2d37.slice/crio-1e727aea78011f2d15af0362aba3166c1b7c97da36f709497695e3c172fd1058 WatchSource:0}: Error finding container 1e727aea78011f2d15af0362aba3166c1b7c97da36f709497695e3c172fd1058: Status 404 returned error can't find the container with id 1e727aea78011f2d15af0362aba3166c1b7c97da36f709497695e3c172fd1058 Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.499568 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" event={"ID":"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970","Type":"ContainerStarted","Data":"44299e5c8081080a1941e5d522098fb03714d9181fd84159cf6f3d5f3face873"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.515061 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" event={"ID":"20083215-8e96-41e3-b76f-68dd87550bc6","Type":"ContainerStarted","Data":"00310bd472bbce356c1b91a61b7230340e278ac59df4decdc38411510260bd5c"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.519001 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb"] Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.520212 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" event={"ID":"d1b66142-28fd-4d6c-b21e-7cc55e1d1672","Type":"ContainerStarted","Data":"d06d0f6fd7422bc6486fa9d4099803bb4e7983814b567fe22fcbd23d74718ccc"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.526133 4685 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" event={"ID":"6136cbbe-cd55-4ba7-829a-621906a4b7b7","Type":"ContainerStarted","Data":"b59f3588dc38cb872208012909504abab48bc977206bbed6e3c86b6a5c120fdd"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.528144 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" event={"ID":"589484c8-ce97-4e9e-b17a-58af833d9915","Type":"ContainerStarted","Data":"ac39432fa95fb9d430b4aee2ba9b6c982712bc4846af7dce17fde45c067fb942"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.528310 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kj84m"] Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.534987 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" event={"ID":"79128f87-7854-4a71-82cb-6c6f7cd51e8c","Type":"ContainerStarted","Data":"91dbbc2e274fd0504e1236f8ff1d03ebb658a1d643dc118dc2089d5bcb82d1e4"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.556217 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:26 crc kubenswrapper[4685]: E1202 10:02:26.556538 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:27.056523722 +0000 UTC m=+39.428297886 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.563641 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" event={"ID":"153dc8a6-429a-40f9-abb1-3c3a8388c018","Type":"ContainerStarted","Data":"a0f341507bdf5fe6252b9176d13547100beb6391d1b8342e2e563ac26d410877"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.567676 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-28vxg" event={"ID":"c69d86c9-b765-4489-ab56-d93126192812","Type":"ContainerStarted","Data":"3ef37512d3de7a9ad523db9778f5b8979c853a0750b37161dde78669fe364801"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.576679 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" event={"ID":"09cfde35-3539-4f7e-8985-9e0cd922f6e8","Type":"ContainerStarted","Data":"97f5588f68f0ed8b8d256018e1a7eb5812f287933187ab7ff15899b94888dce3"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.578363 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" event={"ID":"cea96603-d7a1-4b75-9c91-612fd0744351","Type":"ContainerStarted","Data":"59f18fb8b511255aa320844aa6f0b3c9c8053a4eaf5712d018a524c191063017"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.579880 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" event={"ID":"79c39ecf-eebd-46cd-8b34-768197b3e2ac","Type":"ContainerStarted","Data":"0d65c82f2e69ccdc1615732b79ee19d9e8d7bfbb3c914e2c2ebf218054a56131"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.580714 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" event={"ID":"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec","Type":"ContainerStarted","Data":"575a8fd400a03083bc0e3a9f82f32a717870f595add3716aa16c3e2efcdf1ddf"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.585879 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" event={"ID":"aea099c8-3890-4ce5-9965-10f136542d87","Type":"ContainerStarted","Data":"e1ef4b5740e3fab4d81b531025b5e833c49d50ff940f7ea6c58a29cbbfd9ded6"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.590124 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-269n4" event={"ID":"de06fcee-1542-419a-87ae-cb854b6b4a5e","Type":"ContainerStarted","Data":"a2c1bfe27dc8ca7ce22a57fe9f0ae32874522965ca8ac37642a73715ab47adda"} Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.657211 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:26 crc kubenswrapper[4685]: E1202 10:02:26.657500 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:27.15748518 +0000 UTC m=+39.529259334 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.758645 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:26 crc kubenswrapper[4685]: E1202 10:02:26.758920 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:27.25890807 +0000 UTC m=+39.630682224 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:26 crc kubenswrapper[4685]: W1202 10:02:26.759960 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4cea16e1_b06c_49e3_bc27_791409603cf2.slice/crio-48a1182a9e684abb90dbb49cc1babffc1b35707173b5692b3570faa579f8b6b2 WatchSource:0}: Error finding container 48a1182a9e684abb90dbb49cc1babffc1b35707173b5692b3570faa579f8b6b2: Status 404 returned error can't find the container with id 48a1182a9e684abb90dbb49cc1babffc1b35707173b5692b3570faa579f8b6b2 Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.776476 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76"] Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.861183 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:26 crc kubenswrapper[4685]: E1202 10:02:26.861474 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-12-02 10:02:27.36145932 +0000 UTC m=+39.733233474 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:26 crc kubenswrapper[4685]: E1202 10:02:26.963948 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:27.46393469 +0000 UTC m=+39.835708844 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.963462 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:26 crc kubenswrapper[4685]: I1202 10:02:26.996743 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-vwpld"] Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.029390 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp"] Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.029435 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xf986"] Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.038827 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf"] Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.040460 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc"] Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.066362 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.066676 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-02 10:02:27.566661016 +0000 UTC m=+39.938435170 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.167184 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.167479 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:27.66746843 +0000 UTC m=+40.039242584 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: W1202 10:02:27.190111 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb33fcfc_73bb_4485_928f_0986d4734b50.slice/crio-0f4aa1c5b3562aba9e3f105c81d2e787047c2917258c63d3493de671829bec77 WatchSource:0}: Error finding container 0f4aa1c5b3562aba9e3f105c81d2e787047c2917258c63d3493de671829bec77: Status 404 returned error can't find the container with id 0f4aa1c5b3562aba9e3f105c81d2e787047c2917258c63d3493de671829bec77 Dec 02 10:02:27 crc kubenswrapper[4685]: W1202 10:02:27.196248 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7b00c8d_9311_4164_95fd_15c6564d7cf0.slice/crio-dc28407179d5cdc642d136bb7c9b8441fd2abfdc9a70bb5f9fa66973abbd074d WatchSource:0}: Error finding container dc28407179d5cdc642d136bb7c9b8441fd2abfdc9a70bb5f9fa66973abbd074d: Status 404 returned error can't find the container with id dc28407179d5cdc642d136bb7c9b8441fd2abfdc9a70bb5f9fa66973abbd074d Dec 02 10:02:27 crc kubenswrapper[4685]: W1202 10:02:27.199415 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae0fbaf9_ce34_4978_8d6b_6f40e1597c91.slice/crio-9fe1a145183165bb0fee77293d4bda6ea960736dae46b1aeaf07bf42c02f3935 WatchSource:0}: Error finding container 9fe1a145183165bb0fee77293d4bda6ea960736dae46b1aeaf07bf42c02f3935: Status 404 returned error can't find the container with id 9fe1a145183165bb0fee77293d4bda6ea960736dae46b1aeaf07bf42c02f3935 Dec 02 10:02:27 crc kubenswrapper[4685]: W1202 10:02:27.201144 4685 manager.go:1169] Failed to process watch 
event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod370d38c8_640e_44bd_a095_75fb04be6320.slice/crio-7687bace00cdcf0bacaa4146bf764b6c13e0c987b28d5fd3771a12cdbc452b51 WatchSource:0}: Error finding container 7687bace00cdcf0bacaa4146bf764b6c13e0c987b28d5fd3771a12cdbc452b51: Status 404 returned error can't find the container with id 7687bace00cdcf0bacaa4146bf764b6c13e0c987b28d5fd3771a12cdbc452b51 Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.269005 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.269368 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:27.769353752 +0000 UTC m=+40.141127906 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.270353 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq"] Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.275041 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd"] Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.382742 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.383062 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:27.883050717 +0000 UTC m=+40.254824871 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: W1202 10:02:27.408963 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d3c08e9_d369_4369_b3f1_a36b85a49b39.slice/crio-a778ce23a7eb8403c96c1a93545a1366220c37d938f7b5619125db3bf5c22bd3 WatchSource:0}: Error finding container a778ce23a7eb8403c96c1a93545a1366220c37d938f7b5619125db3bf5c22bd3: Status 404 returned error can't find the container with id a778ce23a7eb8403c96c1a93545a1366220c37d938f7b5619125db3bf5c22bd3 Dec 02 10:02:27 crc kubenswrapper[4685]: W1202 10:02:27.417867 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab49ef86_a462_4609_aaf7_30d74883b8a9.slice/crio-be2239727525718cd08073c5271657a3c1b1878f7d14ebb234cf52f7d86d5868 WatchSource:0}: Error finding container be2239727525718cd08073c5271657a3c1b1878f7d14ebb234cf52f7d86d5868: Status 404 returned error can't find the container with id be2239727525718cd08073c5271657a3c1b1878f7d14ebb234cf52f7d86d5868 Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.483441 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.483659 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.485096 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:27.985074463 +0000 UTC m=+40.356848617 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.492076 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25987b61-91e8-4fbc-b90f-3a00533ef0b5-metrics-certs\") pod \"network-metrics-daemon-rs84c\" (UID: \"25987b61-91e8-4fbc-b90f-3a00533ef0b5\") " pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.584636 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.585050 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.085033374 +0000 UTC m=+40.456807528 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.594380 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" event={"ID":"12f6ce83-cb37-4e64-92c4-fb96aea3b213","Type":"ContainerStarted","Data":"c2a656621b5535c3d8236ff0ff41e470d4b89aa2f548d39e7c3cb42ffe53a62c"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.595427 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" event={"ID":"e7b00c8d-9311-4164-95fd-15c6564d7cf0","Type":"ContainerStarted","Data":"dc28407179d5cdc642d136bb7c9b8441fd2abfdc9a70bb5f9fa66973abbd074d"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.596730 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" event={"ID":"cb33fcfc-73bb-4485-928f-0986d4734b50","Type":"ContainerStarted","Data":"0f4aa1c5b3562aba9e3f105c81d2e787047c2917258c63d3493de671829bec77"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.597834 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" event={"ID":"3445549e-e348-450c-982f-1456d6d029d0","Type":"ContainerStarted","Data":"6b9e065feff9ab2f71a892891c3c4cf8d88596f761d13a42c4c5cf1f029648b7"} Dec 02 
10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.599049 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" event={"ID":"370d38c8-640e-44bd-a095-75fb04be6320","Type":"ContainerStarted","Data":"7687bace00cdcf0bacaa4146bf764b6c13e0c987b28d5fd3771a12cdbc452b51"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.599956 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" event={"ID":"ab49ef86-a462-4609-aaf7-30d74883b8a9","Type":"ContainerStarted","Data":"be2239727525718cd08073c5271657a3c1b1878f7d14ebb234cf52f7d86d5868"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.601020 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-vwpld" event={"ID":"aa1aa5a6-03c6-4256-93e4-6000b78f4481","Type":"ContainerStarted","Data":"791bf3679ae03b09757ebd6b65a543b38dd2d604095101d440018f98a606608a"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.602348 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" event={"ID":"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91","Type":"ContainerStarted","Data":"9fe1a145183165bb0fee77293d4bda6ea960736dae46b1aeaf07bf42c02f3935"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.603678 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" event={"ID":"70ecf800-4f19-4a60-834f-d72f0a1d2d37","Type":"ContainerStarted","Data":"1e727aea78011f2d15af0362aba3166c1b7c97da36f709497695e3c172fd1058"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.604851 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rs84c" Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.604875 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" event={"ID":"4cea16e1-b06c-49e3-bc27-791409603cf2","Type":"ContainerStarted","Data":"48a1182a9e684abb90dbb49cc1babffc1b35707173b5692b3570faa579f8b6b2"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.605948 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" event={"ID":"7def2210-de93-4381-84dd-fe0d507f76b3","Type":"ContainerStarted","Data":"48d2a10a362b6f950d0611efd9be893d144d58ed12bf4098e84ac5e77806969d"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.607070 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" event={"ID":"8d3c08e9-d369-4369-b3f1-a36b85a49b39","Type":"ContainerStarted","Data":"a778ce23a7eb8403c96c1a93545a1366220c37d938f7b5619125db3bf5c22bd3"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.608204 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb" event={"ID":"aa6c0d43-3b58-41f4-bf4b-d9765bde5c98","Type":"ContainerStarted","Data":"a9e7231b3af9f72960a8e5c4f549f839395bb486f4b458091225edafd07c77ca"} Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.621375 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-mpwz8" podStartSLOduration=6.621358263 podStartE2EDuration="6.621358263s" podCreationTimestamp="2025-12-02 10:02:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:27.621310512 +0000 UTC m=+39.993084676" watchObservedRunningTime="2025-12-02 10:02:27.621358263 +0000 UTC m=+39.993132407" Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.685840 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.685982 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.185954961 +0000 UTC m=+40.557729115 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.686063 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.686435 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.186419783 +0000 UTC m=+40.558193947 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.787510 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.787709 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.287683249 +0000 UTC m=+40.659457403 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.787855 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.788172 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.288164462 +0000 UTC m=+40.659938616 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.889083 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.889501 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.38948531 +0000 UTC m=+40.761259464 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:27 crc kubenswrapper[4685]: I1202 10:02:27.990372 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:27 crc kubenswrapper[4685]: E1202 10:02:27.991543 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.491526827 +0000 UTC m=+40.863300971 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.090964 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.091228 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.59120966 +0000 UTC m=+40.962983814 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.192191 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.192546 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.692523848 +0000 UTC m=+41.064298002 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.293814 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.294144 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.794122073 +0000 UTC m=+41.165896297 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.395530 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.395933 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.895917713 +0000 UTC m=+41.267691867 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.497661 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.497818 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.997795957 +0000 UTC m=+41.369570111 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.497864 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.498156 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:28.998143846 +0000 UTC m=+41.369918000 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.601082 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.601237 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.10121075 +0000 UTC m=+41.472984904 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.601388 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.601701 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.101690344 +0000 UTC m=+41.473464498 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.640156 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" event={"ID":"c26cde94-5fa8-4f83-ba18-eef7033f6ec1","Type":"ContainerStarted","Data":"9953d7e17a65dbd5b98730830091badbf3ed7020aa3dad1d9fd89b803648c274"} Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.702448 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.702804 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.202790166 +0000 UTC m=+41.574564320 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.803488 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.804215 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.304177175 +0000 UTC m=+41.675951329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.820310 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rs84c"] Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.905345 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.905519 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.405496372 +0000 UTC m=+41.777270526 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:28 crc kubenswrapper[4685]: I1202 10:02:28.905590 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:28 crc kubenswrapper[4685]: E1202 10:02:28.905947 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.405939044 +0000 UTC m=+41.777713198 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.007073 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.007434 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.507418436 +0000 UTC m=+41.879192590 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.109335 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.110476 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.61044184 +0000 UTC m=+41.982216034 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.210407 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.210603 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.710554105 +0000 UTC m=+42.082328279 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.211030 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.211593 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.711541281 +0000 UTC m=+42.083315475 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.313194 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.313295 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.813275911 +0000 UTC m=+42.185050065 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.313650 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.315058 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.815048679 +0000 UTC m=+42.186822833 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.416372 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.416907 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:29.916888111 +0000 UTC m=+42.288662265 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.518145 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.518438 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.018427224 +0000 UTC m=+42.390201378 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.619401 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.619622 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.119590737 +0000 UTC m=+42.491364891 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.619668 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.620036 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.120025969 +0000 UTC m=+42.491800123 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.648276 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hgnrz" event={"ID":"4e159366-bd40-43f3-9b3b-c818913c957a","Type":"ContainerStarted","Data":"92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.650999 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-tkpq4" event={"ID":"096d8145-ede4-4af3-a326-7a1739ca1dc4","Type":"ContainerStarted","Data":"f76bc4253c9cb5866a3fbfaf69bd159fa4de4c9b6644c16fc550e79f1454a6c7"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.652544 4685 generic.go:334] "Generic (PLEG): container finished" podID="c26cde94-5fa8-4f83-ba18-eef7033f6ec1" containerID="9953d7e17a65dbd5b98730830091badbf3ed7020aa3dad1d9fd89b803648c274" exitCode=0 Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.652608 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" event={"ID":"c26cde94-5fa8-4f83-ba18-eef7033f6ec1","Type":"ContainerDied","Data":"9953d7e17a65dbd5b98730830091badbf3ed7020aa3dad1d9fd89b803648c274"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.653716 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-mfk9b" event={"ID":"492f18b2-8ffc-4f93-a2de-6bf204e1a4f3","Type":"ContainerStarted","Data":"cc8342ea37bf1ac17a4b332e68c2400ce29725218fe6daa6e5c5b33af652f12e"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.655047 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"330193f4f5a5d77ee95945477c5af9d4884c61234e08d44fdc8711259df017b5"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.656594 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" event={"ID":"984c3c9d-ea5e-42fe-ba8b-e2c4e1bec4e6","Type":"ContainerStarted","Data":"136bea476b75e3bac01edac3e7643c2dd6cd2ef5fd473757d4e7763d842a7846"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.657830 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-m7vnk" event={"ID":"60826f81-d0cb-4339-83b0-d40a5df1aff8","Type":"ContainerStarted","Data":"59e9e0984deee507ae038957ca1e83282aa7db22e6eabbe9f0b717540194e97b"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.659460 4685 generic.go:334] "Generic (PLEG): container finished" podID="0a30ded0-7ce0-4ed4-9e40-9266f1472af4" containerID="81a6315ee71ce31d9a59db8d089a33b7de3a3173fd2ce8b6cf338fedb769a69b" exitCode=0 Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.659512 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" event={"ID":"0a30ded0-7ce0-4ed4-9e40-9266f1472af4","Type":"ContainerDied","Data":"81a6315ee71ce31d9a59db8d089a33b7de3a3173fd2ce8b6cf338fedb769a69b"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.661699 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rs84c" event={"ID":"25987b61-91e8-4fbc-b90f-3a00533ef0b5","Type":"ContainerStarted","Data":"69bb8e786aab01a4a24a855a632f6ed2353de318cac5c83e0940f31b72e2cb44"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.662991 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" event={"ID":"387d4a50-7633-485d-b8d3-ef2afc8c3c97","Type":"ContainerStarted","Data":"cd97bd7d81d37aca5bac8b5ea221ecc10da1b3fdcae30899e413276946f56ef1"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.664077 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" event={"ID":"6651d927-5e58-43b0-8c92-f425f6145e31","Type":"ContainerStarted","Data":"adb15bb6b310e3d2563eb23dee99b358fdfbbdd1cc8fe48d06f244f57cf853e8"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.665710 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" event={"ID":"332dd167-8e86-4071-a277-547ab88bc15d","Type":"ContainerStarted","Data":"78646ab340a7c4ea26e3ea8d5de069efb05a75cbc1f73301105744cb18b00d5c"} Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.720900 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.721133 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.22111855 +0000 UTC m=+42.592892704 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.822199 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.822595 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.322577702 +0000 UTC m=+42.694351856 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.923280 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.923456 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.423432076 +0000 UTC m=+42.795206230 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:29 crc kubenswrapper[4685]: I1202 10:02:29.923996 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:29 crc kubenswrapper[4685]: E1202 10:02:29.924319 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.424309911 +0000 UTC m=+42.796084055 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.024736 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.024941 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.524910589 +0000 UTC m=+42.896684743 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.024997 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.025290 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.525277469 +0000 UTC m=+42.897051623 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.126058 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.126270 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.626242387 +0000 UTC m=+42.998016541 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.126313 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.126631 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.626617897 +0000 UTC m=+42.998392041 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.227509 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.227708 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.727682237 +0000 UTC m=+43.099456391 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.227760 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.228052 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.728038597 +0000 UTC m=+43.099812751 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.328592 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.328724 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.828699707 +0000 UTC m=+43.200473861 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.328761 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.329044 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.829032126 +0000 UTC m=+43.200806270 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.429690 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.430043 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:30.930021474 +0000 UTC m=+43.301795628 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.532033 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.532337 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.032326089 +0000 UTC m=+43.404100243 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.632993 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.633162 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.133137073 +0000 UTC m=+43.504911227 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.633307 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.633600 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.133585335 +0000 UTC m=+43.505359489 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.671226 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" event={"ID":"2655a11a-813f-40db-9e51-33122afc460a","Type":"ContainerStarted","Data":"d619b12a90f2f6e38c2b8f2204a841ed005ea0507c1b4be7eba382e7595bd442"} Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.673020 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" event={"ID":"589484c8-ce97-4e9e-b17a-58af833d9915","Type":"ContainerStarted","Data":"9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503"} Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.674477 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" event={"ID":"4135e4b9-a4ab-4961-8dfa-7c0e83fa1970","Type":"ContainerStarted","Data":"b3c1d4155faae049d1117f3cf12eaf8bd5c80458370f7f3c38a0dd2129d884f0"} Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.675832 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" event={"ID":"cea96603-d7a1-4b75-9c91-612fd0744351","Type":"ContainerStarted","Data":"319def4c1a49b42f21a5dfaf29e8d85b4100ccc57255a3d8d1cbb086a8f0c479"} Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.677505 4685 generic.go:334] "Generic (PLEG): container finished" podID="6651d927-5e58-43b0-8c92-f425f6145e31" containerID="adb15bb6b310e3d2563eb23dee99b358fdfbbdd1cc8fe48d06f244f57cf853e8" exitCode=0 Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.677619 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" 
event={"ID":"6651d927-5e58-43b0-8c92-f425f6145e31","Type":"ContainerDied","Data":"adb15bb6b310e3d2563eb23dee99b358fdfbbdd1cc8fe48d06f244f57cf853e8"} Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.678719 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" event={"ID":"79c39ecf-eebd-46cd-8b34-768197b3e2ac","Type":"ContainerStarted","Data":"fcfc95e892c1779006a347a9cdd1bc8adc6f6ff322c19bfe25c9e8dfb4a461bf"} Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.682070 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" event={"ID":"d1b66142-28fd-4d6c-b21e-7cc55e1d1672","Type":"ContainerStarted","Data":"da6027a042f5b91a7de07250a998a04966284782cf03c773716d28ad34732cc5"} Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.733767 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.733940 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.233898625 +0000 UTC m=+43.605672779 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.734003 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.734301 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.234290636 +0000 UTC m=+43.606064790 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.835161 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.835509 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.33549387 +0000 UTC m=+43.707268024 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:30 crc kubenswrapper[4685]: I1202 10:02:30.936903 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:30 crc kubenswrapper[4685]: E1202 10:02:30.937258 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.437243829 +0000 UTC m=+43.809017993 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.038438 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.038731 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.538700881 +0000 UTC m=+43.910475035 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.039083 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.039428 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.53941303 +0000 UTC m=+43.911187184 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.139858 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.140310 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.640291976 +0000 UTC m=+44.012066130 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.241816 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.242151 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.742137327 +0000 UTC m=+44.113911481 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.342396 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.342771 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.842751005 +0000 UTC m=+44.214525149 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.444301 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.444672 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:31.944657119 +0000 UTC m=+44.316431273 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.545836 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.546124 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.046109931 +0000 UTC m=+44.417884085 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.646998 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.647377 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.147360866 +0000 UTC m=+44.519135020 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.686650 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" event={"ID":"153dc8a6-429a-40f9-abb1-3c3a8388c018","Type":"ContainerStarted","Data":"6523f3f46e744d8dfcb632c97571c9e5d1ff3b13f40af1739b1676481a3e42cb"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.687773 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb" event={"ID":"aa6c0d43-3b58-41f4-bf4b-d9765bde5c98","Type":"ContainerStarted","Data":"686e0e64f7e0fc70ae1e1c2c92e997d7afe0e3c5c52dc680d977c2376c0372c6"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.688892 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"28a3f34c3992f58a55ec2a2971c7acb7e5e8cce81e27223731119634e3853b94"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.689944 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" event={"ID":"b1d639aa-af8d-40c6-bd5a-6543735cce4e","Type":"ContainerStarted","Data":"b7eb0603516e024e006c97c5e0b8420598a016ffa4ddbb6125c9bfa81f6c2c03"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.691029 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" event={"ID":"6136cbbe-cd55-4ba7-829a-621906a4b7b7","Type":"ContainerStarted","Data":"38cff269f1ff9d5e9421af972c471c48fceee914d7c765a64f601ed13f2d8117"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.692022 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" event={"ID":"370d38c8-640e-44bd-a095-75fb04be6320","Type":"ContainerStarted","Data":"fc3a9529c19e07b8d80cc79c192cd0d2e55ee4a14930a3ee7094eb774549da27"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.692927 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" event={"ID":"aea099c8-3890-4ce5-9965-10f136542d87","Type":"ContainerStarted","Data":"fa1b3dbabad9e0c5c5e29c4626cfa2d220572edfc285f2c1a3e8c4ad94ee1af8"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.693905 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" event={"ID":"8e3956cb-a0a9-4add-862c-b7facbcbf400","Type":"ContainerStarted","Data":"7b26b677d9eee636105bc23fbdde1c7b25cbae215efc49fd955dec681cb6fff0"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.694969 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" 
event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"e95afda6a828ee08e799a7f9e61d63992fdda998a95d787a3a041e77e64ef198"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.696005 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" event={"ID":"2dee0464-6eab-44a6-a33d-3b6096319ecf","Type":"ContainerStarted","Data":"095966827d6b02a4d7c27ecbbcda3626fb95f23573ce5ab3cdd0076fe68a8bec"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.698606 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-269n4" event={"ID":"de06fcee-1542-419a-87ae-cb854b6b4a5e","Type":"ContainerStarted","Data":"7ad074b3eaafda768fbbf6fb964aefcb3dfd419b2d6140f5d56d5a3dc0d3aa2d"} Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.747876 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.748129 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.248115208 +0000 UTC m=+44.619889362 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.851735 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.852328 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.352314184 +0000 UTC m=+44.724088338 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:31 crc kubenswrapper[4685]: I1202 10:02:31.953708 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:31 crc kubenswrapper[4685]: E1202 10:02:31.953905 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.453878388 +0000 UTC m=+44.825652542 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.055292 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.055687 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.555669429 +0000 UTC m=+44.927443583 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.156233 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.156405 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.65637945 +0000 UTC m=+45.028153604 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.156446 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.156784 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.656775731 +0000 UTC m=+45.028549875 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.256904 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.257077 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.757053459 +0000 UTC m=+45.128827613 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.257215 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.257511 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.757500132 +0000 UTC m=+45.129274286 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.358360 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.358716 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.858700976 +0000 UTC m=+45.230475130 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.460064 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.460928 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:32.960911468 +0000 UTC m=+45.332685632 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.561928 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.562343 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:33.062320557 +0000 UTC m=+45.434094711 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.663662 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.664062 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:33.164044647 +0000 UTC m=+45.535818801 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.720975 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" event={"ID":"ab49ef86-a462-4609-aaf7-30d74883b8a9","Type":"ContainerStarted","Data":"10df85ef79ee52efa57d710c026b1497fb5fc7b09ee59fc78fd96367269b0a30"} Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.744126 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" event={"ID":"09cfde35-3539-4f7e-8985-9e0cd922f6e8","Type":"ContainerStarted","Data":"72ae218b75584e0d1a782ad1d3a57cfeaa725bcf90d74b24fd58fea6c1b76024"} Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.766657 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-28vxg" event={"ID":"c69d86c9-b765-4489-ab56-d93126192812","Type":"ContainerStarted","Data":"d6cec0264c6b43ba51301472d2febd122880c2121658a0b7c018a5bfec29a79e"} Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.767182 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.767532 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:33.267512963 +0000 UTC m=+45.639287117 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.792226 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" event={"ID":"8d3c08e9-d369-4369-b3f1-a36b85a49b39","Type":"ContainerStarted","Data":"2955007599d0f674246153adb9c96a44185db2b2fa63ac7a6c9aed31b77706fc"} Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.870918 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.872101 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:33.372090129 +0000 UTC m=+45.743864283 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.873413 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" event={"ID":"70ecf800-4f19-4a60-834f-d72f0a1d2d37","Type":"ContainerStarted","Data":"385f6748d99b004288c17fe28dad33de35da2acac3573896471a51f03151f5bd"} Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.894817 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" event={"ID":"79128f87-7854-4a71-82cb-6c6f7cd51e8c","Type":"ContainerStarted","Data":"284321efb4c6719baf3a501f8cc08e6beec9f807c8c779765b83f1db33f17398"} Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.895685 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.899812 4685 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-hz9nf container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.43:8443/healthz\": dial tcp 10.217.0.43:8443: connect: connection refused" start-of-body= Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.899852 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" 
podUID="79128f87-7854-4a71-82cb-6c6f7cd51e8c" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.43:8443/healthz\": dial tcp 10.217.0.43:8443: connect: connection refused" Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.964385 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" event={"ID":"e7b00c8d-9311-4164-95fd-15c6564d7cf0","Type":"ContainerStarted","Data":"50984e15a3db995c25f1abde5edd6a298aa9cf5c7658fe17a2d01e108e00f432"} Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.976016 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:32 crc kubenswrapper[4685]: E1202 10:02:32.976907 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:33.476892751 +0000 UTC m=+45.848666895 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.996313 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-n64h8" podStartSLOduration=26.996294339 podStartE2EDuration="26.996294339s" podCreationTimestamp="2025-12-02 10:02:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:32.834180076 +0000 UTC m=+45.205954230" watchObservedRunningTime="2025-12-02 10:02:32.996294339 +0000 UTC m=+45.368068493" Dec 02 10:02:32 crc kubenswrapper[4685]: I1202 10:02:32.997874 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rpwnx" podStartSLOduration=24.997867522 podStartE2EDuration="24.997867522s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:32.995352443 +0000 UTC m=+45.367126597" watchObservedRunningTime="2025-12-02 10:02:32.997867522 +0000 UTC m=+45.369641676" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.036901 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-vwpld" event={"ID":"aa1aa5a6-03c6-4256-93e4-6000b78f4481","Type":"ContainerStarted","Data":"7602ecad05dbc3556813ad8fb4157001956ef5bb753a530ceed0bc3d1a30a10c"} Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.038998 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" 
event={"ID":"71660d3a-ef68-4dc4-9d4f-bdae6372f2ec","Type":"ContainerStarted","Data":"02c7d4f44613503572c1e71f512239d0181b66185ae28884a8abba442ff52955"} Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.053405 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" event={"ID":"ae0fbaf9-ce34-4978-8d6b-6f40e1597c91","Type":"ContainerStarted","Data":"104a3e2efd4b75992d0a5fca282957f829f3100b2655b8a35f1e8d08e953fd7a"} Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.054333 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.055661 4685 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-mkjcc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused" start-of-body= Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.055705 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" podUID="ae0fbaf9-ce34-4978-8d6b-6f40e1597c91" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.062692 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" event={"ID":"4cea16e1-b06c-49e3-bc27-791409603cf2","Type":"ContainerStarted","Data":"9b3ae00e939193afb511e6d08fe7964758f280a05c10c3ca4425da8838161ebe"} Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.064400 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rs84c" event={"ID":"25987b61-91e8-4fbc-b90f-3a00533ef0b5","Type":"ContainerStarted","Data":"ce9a0350f0ae78c152a4e566ebccfc2acd5caf2308f4d76a996862256fb86682"} Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.065645 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" event={"ID":"20083215-8e96-41e3-b76f-68dd87550bc6","Type":"ContainerStarted","Data":"f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a"} Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.066046 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.077754 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:33 crc kubenswrapper[4685]: E1202 10:02:33.078528 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:33.578516527 +0000 UTC m=+45.950290671 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.087773 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" event={"ID":"12f6ce83-cb37-4e64-92c4-fb96aea3b213","Type":"ContainerStarted","Data":"1cea13510c9034a5f4eb0f438857b548939c8c03d1bf9c95d0989122af9b8e22"} Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.108168 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" event={"ID":"7def2210-de93-4381-84dd-fe0d507f76b3","Type":"ContainerStarted","Data":"726fbe8b86c5da91b6b34c6facc006767c92adcaa568cd3d60030a53a1f4d9dd"} Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.152131 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" event={"ID":"cb33fcfc-73bb-4485-928f-0986d4734b50","Type":"ContainerStarted","Data":"0ac192c429edee0771cc4725b5ae3c2966aa71fe10b2022bb76fc4525d8baaf4"} Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.180259 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:33 crc kubenswrapper[4685]: E1202 10:02:33.180772 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:33.680752359 +0000 UTC m=+46.052526523 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.204771 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" event={"ID":"3445549e-e348-450c-982f-1456d6d029d0","Type":"ContainerStarted","Data":"6c5454b785039246441d8c0c22004f81cfc8abbd4a67a33677bfb9232cf230b7"} Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.209681 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.210027 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.210044 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.210056 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.211610 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.216046 4685 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xf986 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.216090 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" podUID="370d38c8-640e-44bd-a095-75fb04be6320" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.217417 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.226940 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.236448 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.247261 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.252018 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.252147 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-mfk9b" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.286847 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:33 crc kubenswrapper[4685]: E1202 10:02:33.287176 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:33.787161505 +0000 UTC m=+46.158935659 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.317739 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" podStartSLOduration=25.317718457 podStartE2EDuration="25.317718457s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:33.215739782 +0000 UTC m=+45.587513926" watchObservedRunningTime="2025-12-02 10:02:33.317718457 +0000 UTC m=+45.689492621" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.388272 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:33 crc kubenswrapper[4685]: E1202 10:02:33.391091 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:33.891076493 +0000 UTC m=+46.262850647 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.424766 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.435050 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:33 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:33 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:33 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.435102 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.491880 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:33 crc kubenswrapper[4685]: E1202 10:02:33.492300 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:33.992266907 +0000 UTC m=+46.364041061 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.594110 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:33 crc kubenswrapper[4685]: E1202 10:02:33.594349 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:34.094334826 +0000 UTC m=+46.466108980 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.653373 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-mfk9b" podStartSLOduration=26.647560994 podStartE2EDuration="26.647560994s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:33.645334414 +0000 UTC m=+46.017108568" watchObservedRunningTime="2025-12-02 10:02:33.647560994 +0000 UTC m=+46.019335148" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.697286 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:33 crc kubenswrapper[4685]: E1202 10:02:33.697629 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:34.197615307 +0000 UTC m=+46.569389461 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.705075 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-m7vnk" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.708744 4685 patch_prober.go:28] interesting pod/downloads-7954f5f757-m7vnk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.708807 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m7vnk" podUID="60826f81-d0cb-4339-83b0-d40a5df1aff8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.708889 4685 patch_prober.go:28] interesting pod/downloads-7954f5f757-m7vnk container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.708903 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-m7vnk" podUID="60826f81-d0cb-4339-83b0-d40a5df1aff8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.724062 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.724098 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.730697 4685 patch_prober.go:28] interesting pod/console-f9d7485db-hgnrz container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.730753 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-hgnrz" podUID="4e159366-bd40-43f3-9b3b-c818913c957a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 02 10:02:33 crc kubenswrapper[4685]: E1202 10:02:33.809584 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:34.309549633 +0000 UTC m=+46.681323787 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.809685 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.810001 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:33 crc kubenswrapper[4685]: E1202 10:02:33.810273 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:34.310260982 +0000 UTC m=+46.682035146 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:33 crc kubenswrapper[4685]: I1202 10:02:33.915160 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:33 crc kubenswrapper[4685]: E1202 10:02:33.915605 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:34.415582259 +0000 UTC m=+46.787356423 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.017121 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:34 crc kubenswrapper[4685]: E1202 10:02:34.017820 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:34.517806361 +0000 UTC m=+46.889580515 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.120896 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:34 crc kubenswrapper[4685]: E1202 10:02:34.121228 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:34.621209085 +0000 UTC m=+46.992983239 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.180589 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-tkpq4" podStartSLOduration=27.18054824 podStartE2EDuration="27.18054824s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.140801089 +0000 UTC m=+46.512575253" watchObservedRunningTime="2025-12-02 10:02:34.18054824 +0000 UTC m=+46.552322404" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.183485 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-cx6sh" podStartSLOduration=27.18347451 podStartE2EDuration="27.18347451s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:33.960666826 +0000 UTC m=+46.332440990" watchObservedRunningTime="2025-12-02 10:02:34.18347451 +0000 UTC m=+46.555248664" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.221941 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:34 crc kubenswrapper[4685]: E1202 10:02:34.222322 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:34.722304787 +0000 UTC m=+47.094078941 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.227184 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" event={"ID":"6136cbbe-cd55-4ba7-829a-621906a4b7b7","Type":"ContainerStarted","Data":"60111e95ff51ed3d9eddc9648b303795cefed48d9cf34ff4e831c3ce598d0d70"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.230734 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-vwpld" event={"ID":"aa1aa5a6-03c6-4256-93e4-6000b78f4481","Type":"ContainerStarted","Data":"07194b7b7957f24da8f2c568c13a270faed724397e75d49baa5a3569aefb51a9"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.231250 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.246104 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" event={"ID":"c26cde94-5fa8-4f83-ba18-eef7033f6ec1","Type":"ContainerStarted","Data":"1ee49a5cb8491cc95230ebcf899e196d3d2bd71f09099af3344bb329c6105014"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.246655 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.266166 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" event={"ID":"79c39ecf-eebd-46cd-8b34-768197b3e2ac","Type":"ContainerStarted","Data":"b7a49dffa8b2285a50f6caef646323c8f8e9c67c8c50d77d59351a0576e52586"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.266861 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.277615 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" event={"ID":"8d3c08e9-d369-4369-b3f1-a36b85a49b39","Type":"ContainerStarted","Data":"0470b4df19357c19c7cca04b4f755b42b62eda394656dae1417e3bc64dd4f21c"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.280070 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" event={"ID":"12f6ce83-cb37-4e64-92c4-fb96aea3b213","Type":"ContainerStarted","Data":"124d43485bb6a0a834baabcb950e32b8749156f61cfc33da4ab14140535a41db"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.285871 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rs84c" event={"ID":"25987b61-91e8-4fbc-b90f-3a00533ef0b5","Type":"ContainerStarted","Data":"0a8df708445bc3b666fa31707efdf4884555ea51ef09947d252c57c1a8290128"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.305892 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" event={"ID":"e7b00c8d-9311-4164-95fd-15c6564d7cf0","Type":"ContainerStarted","Data":"6837e3af48de0a75393118ef1dd0a0bb04febdb06622be60bbda7e04327c3eee"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.322936 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:34 crc kubenswrapper[4685]: E1202 10:02:34.323309 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:34.823291286 +0000 UTC m=+47.195065450 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.333091 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-nvrwh" podStartSLOduration=27.333075752 podStartE2EDuration="27.333075752s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.284171771 +0000 UTC m=+46.655945925" watchObservedRunningTime="2025-12-02 10:02:34.333075752 +0000 UTC m=+46.704849906" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.333926 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" event={"ID":"4cea16e1-b06c-49e3-bc27-791409603cf2","Type":"ContainerStarted","Data":"cc737533b4bb2d52715f801284f59489d282eefd8f2372a48188262bac4d487c"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.365810 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb" event={"ID":"aa6c0d43-3b58-41f4-bf4b-d9765bde5c98","Type":"ContainerStarted","Data":"d76518021e1db14ebdcbe47fbb6ab9e2067015300f5b01aa71897a44ce843016"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.378060 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" podStartSLOduration=27.378020295 podStartE2EDuration="27.378020295s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.333005549 +0000 UTC m=+46.704779703" watchObservedRunningTime="2025-12-02 10:02:34.378020295 +0000 UTC m=+46.749794449" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.379759 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-cm7qg" podStartSLOduration=26.379747832 podStartE2EDuration="26.379747832s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.376493683 +0000 UTC m=+46.748267838" watchObservedRunningTime="2025-12-02 10:02:34.379747832 +0000 UTC m=+46.751521986" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.405395 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" event={"ID":"2655a11a-813f-40db-9e51-33122afc460a","Type":"ContainerStarted","Data":"eaae23df150f6c73894f3e0d6addd9987becf27b3d5f9abb90cac40fb81f04a4"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.424194 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.424714 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:34 crc kubenswrapper[4685]: E1202 10:02:34.425214 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:34.925196989 +0000 UTC m=+47.296971213 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.430718 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" event={"ID":"2dee0464-6eab-44a6-a33d-3b6096319ecf","Type":"ContainerStarted","Data":"6f42932031024bde5cde9c1e09bee4051815dd64cfa27dbe8004938ce27e00eb"} Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.432824 4685 patch_prober.go:28] interesting pod/downloads-7954f5f757-m7vnk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.432860 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m7vnk" podUID="60826f81-d0cb-4339-83b0-d40a5df1aff8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.432970 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:34 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:34 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:34 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.432986 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.433771 4685 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xf986 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.433824 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" podUID="370d38c8-640e-44bd-a095-75fb04be6320" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.491365 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" podStartSLOduration=27.49135016 podStartE2EDuration="27.49135016s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 
10:02:34.49099069 +0000 UTC m=+46.862764844" watchObservedRunningTime="2025-12-02 10:02:34.49135016 +0000 UTC m=+46.863124314" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.491789 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-qd6h4" podStartSLOduration=28.491783011 podStartE2EDuration="28.491783011s" podCreationTimestamp="2025-12-02 10:02:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.423062032 +0000 UTC m=+46.794836196" watchObservedRunningTime="2025-12-02 10:02:34.491783011 +0000 UTC m=+46.863557165" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.513382 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz9nf" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.527058 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:34 crc kubenswrapper[4685]: E1202 10:02:34.528182 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.028167392 +0000 UTC m=+47.399941546 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.588769 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-74mhp" podStartSLOduration=26.588753141 podStartE2EDuration="26.588753141s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.586697095 +0000 UTC m=+46.958471249" watchObservedRunningTime="2025-12-02 10:02:34.588753141 +0000 UTC m=+46.960527295" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.629512 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:34 crc kubenswrapper[4685]: E1202 10:02:34.631427 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-02 10:02:35.131411812 +0000 UTC m=+47.503185986 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.652071 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" podStartSLOduration=26.652044493 podStartE2EDuration="26.652044493s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.639529583 +0000 UTC m=+47.011303737" watchObservedRunningTime="2025-12-02 10:02:34.652044493 +0000 UTC m=+47.023818667" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.730813 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:34 crc kubenswrapper[4685]: E1202 10:02:34.731168 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.231149076 +0000 UTC m=+47.602923230 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.763536 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-t4sm7" podStartSLOduration=28.763516057 podStartE2EDuration="28.763516057s" podCreationTimestamp="2025-12-02 10:02:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.71291663 +0000 UTC m=+47.084690784" watchObservedRunningTime="2025-12-02 10:02:34.763516057 +0000 UTC m=+47.135290211" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.764519 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-m7vnk" podStartSLOduration=27.764511995 podStartE2EDuration="27.764511995s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.75298492 +0000 UTC m=+47.124759074" watchObservedRunningTime="2025-12-02 10:02:34.764511995 +0000 UTC m=+47.136286149" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.833268 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:34 crc kubenswrapper[4685]: E1202 10:02:34.833663 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.333649966 +0000 UTC m=+47.705424120 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.861934 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.864903 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" podStartSLOduration=13.864891066 podStartE2EDuration="13.864891066s" podCreationTimestamp="2025-12-02 10:02:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.864049243 +0000 UTC m=+47.235823397" watchObservedRunningTime="2025-12-02 10:02:34.864891066 +0000 UTC m=+47.236665220" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.888819 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.937094 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:34 crc kubenswrapper[4685]: E1202 10:02:34.937773 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.437756409 +0000 UTC m=+47.809530563 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.938824 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-wkzmt" podStartSLOduration=27.938804328 podStartE2EDuration="27.938804328s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.936908236 +0000 UTC m=+47.308682410" watchObservedRunningTime="2025-12-02 10:02:34.938804328 +0000 UTC m=+47.310578482" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.997141 4685 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xf986 container/marketplace-operator namespace/openshift-marketplace: Liveness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.997417 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" podUID="370d38c8-640e-44bd-a095-75fb04be6320" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.997986 4685 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xf986 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Dec 02 10:02:34 crc kubenswrapper[4685]: I1202 10:02:34.998101 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" podUID="370d38c8-640e-44bd-a095-75fb04be6320" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.000003 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-hwkfb" podStartSLOduration=26.999992213 podStartE2EDuration="26.999992213s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.998242925 +0000 UTC m=+47.370017089" watchObservedRunningTime="2025-12-02 10:02:34.999992213 +0000 UTC m=+47.371766367" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.002436 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" podStartSLOduration=27.002421319 podStartE2EDuration="27.002421319s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:34.966830811 +0000 UTC m=+47.338604965" watchObservedRunningTime="2025-12-02 10:02:35.002421319 +0000 UTC m=+47.374195473" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.039510 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.039947 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.53993009 +0000 UTC m=+47.911704244 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.140959 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.141146 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.641113934 +0000 UTC m=+48.012888088 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.142491 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.142821 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.642813341 +0000 UTC m=+48.014587495 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.200945 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6kndg"] Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.201811 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.203839 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.212657 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6kndg"] Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.243720 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.243907 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.743881221 +0000 UTC m=+48.115655375 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.244313 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.244832 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.744817286 +0000 UTC m=+48.116591440 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.286363 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.286966 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.295283 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.295408 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.298061 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.345502 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.345724 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dvbl\" (UniqueName: \"kubernetes.io/projected/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-kube-api-access-4dvbl\") pod \"community-operators-6kndg\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.345791 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-catalog-content\") pod \"community-operators-6kndg\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.345813 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-utilities\") pod \"community-operators-6kndg\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.345932 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.845917858 +0000 UTC m=+48.217692012 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.429252 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:35 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:35 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:35 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.429315 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.430891 4685 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-mkjcc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.430926 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" podUID="ae0fbaf9-ce34-4978-8d6b-6f40e1597c91" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.437223 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" event={"ID":"6651d927-5e58-43b0-8c92-f425f6145e31","Type":"ContainerStarted","Data":"d4d3ee778ac05b70d46afc778e8a86b355ab55705619b8320b9b89d1c75c1502"} Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.439205 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" event={"ID":"0a30ded0-7ce0-4ed4-9e40-9266f1472af4","Type":"ContainerStarted","Data":"25fd5e85f26d34e082101bb85c8f84983bb2e493b21a50cd074be9bef3a88430"} Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.447343 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5c2625f0-cd02-453e-bd74-8a7ba8937ea7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.447391 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: 
\"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.447430 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-catalog-content\") pod \"community-operators-6kndg\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.447453 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-utilities\") pod \"community-operators-6kndg\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.447492 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5c2625f0-cd02-453e-bd74-8a7ba8937ea7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.447513 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dvbl\" (UniqueName: \"kubernetes.io/projected/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-kube-api-access-4dvbl\") pod \"community-operators-6kndg\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.447782 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:35.947763649 +0000 UTC m=+48.319537793 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.447900 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-utilities\") pod \"community-operators-6kndg\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.447903 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-catalog-content\") pod \"community-operators-6kndg\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.465937 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dvbl\" (UniqueName: \"kubernetes.io/projected/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-kube-api-access-4dvbl\") pod \"community-operators-6kndg\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.515679 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.548845 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.548996 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.048972645 +0000 UTC m=+48.420746799 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.549801 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5c2625f0-cd02-453e-bd74-8a7ba8937ea7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.550958 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5c2625f0-cd02-453e-bd74-8a7ba8937ea7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.552728 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5c2625f0-cd02-453e-bd74-8a7ba8937ea7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.554192 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.554673 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.054659759 +0000 UTC m=+48.426433983 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.572878 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5c2625f0-cd02-453e-bd74-8a7ba8937ea7\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.599288 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.645037 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8vsrk"] Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.647865 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.659362 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.659851 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.159834281 +0000 UTC m=+48.531608435 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.671706 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8vsrk"] Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.676647 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-nr46b" podStartSLOduration=27.676622918 podStartE2EDuration="27.676622918s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:35.674833509 +0000 UTC m=+48.046607663" watchObservedRunningTime="2025-12-02 10:02:35.676622918 +0000 UTC m=+48.048397072" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.703001 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-9rnjd" podStartSLOduration=28.702982296 podStartE2EDuration="28.702982296s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:35.70055478 +0000 UTC m=+48.072328924" watchObservedRunningTime="2025-12-02 10:02:35.702982296 +0000 UTC m=+48.074756450" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.738919 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" podStartSLOduration=27.738903063 podStartE2EDuration="27.738903063s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:35.738903353 +0000 UTC m=+48.110677517" 
watchObservedRunningTime="2025-12-02 10:02:35.738903063 +0000 UTC m=+48.110677217" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.764996 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-hgnrz" podStartSLOduration=28.764977193 podStartE2EDuration="28.764977193s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:35.76484988 +0000 UTC m=+48.136624034" watchObservedRunningTime="2025-12-02 10:02:35.764977193 +0000 UTC m=+48.136751347" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.765420 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9znt\" (UniqueName: \"kubernetes.io/projected/780a97cc-9fca-4b67-a956-8be8a6eb3d08-kube-api-access-p9znt\") pod \"community-operators-8vsrk\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.765464 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.765493 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-catalog-content\") pod \"community-operators-8vsrk\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.765534 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-utilities\") pod \"community-operators-8vsrk\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.765817 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.265806766 +0000 UTC m=+48.637580920 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.804447 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-74vzt" podStartSLOduration=27.804430537000002 podStartE2EDuration="27.804430537s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:35.803840841 +0000 UTC m=+48.175614995" watchObservedRunningTime="2025-12-02 10:02:35.804430537 +0000 UTC m=+48.176204691" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.858320 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" podStartSLOduration=28.858301373 podStartE2EDuration="28.858301373s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:35.858268332 +0000 UTC m=+48.230042476" watchObservedRunningTime="2025-12-02 10:02:35.858301373 +0000 UTC m=+48.230075527" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.867027 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.867213 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9znt\" (UniqueName: \"kubernetes.io/projected/780a97cc-9fca-4b67-a956-8be8a6eb3d08-kube-api-access-p9znt\") pod \"community-operators-8vsrk\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.867253 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-catalog-content\") pod \"community-operators-8vsrk\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.867293 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-utilities\") pod \"community-operators-8vsrk\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.867686 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-utilities\") pod \"community-operators-8vsrk\" (UID: 
\"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.867739 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.367717659 +0000 UTC m=+48.739491873 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.867887 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-catalog-content\") pod \"community-operators-8vsrk\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.881629 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-269n4" podStartSLOduration=14.881611797 podStartE2EDuration="14.881611797s" podCreationTimestamp="2025-12-02 10:02:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:35.878932104 +0000 UTC m=+48.250706258" watchObservedRunningTime="2025-12-02 10:02:35.881611797 +0000 UTC m=+48.253385951" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.898385 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9znt\" (UniqueName: \"kubernetes.io/projected/780a97cc-9fca-4b67-a956-8be8a6eb3d08-kube-api-access-p9znt\") pod \"community-operators-8vsrk\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.913121 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" podStartSLOduration=27.913106135 podStartE2EDuration="27.913106135s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:35.912012665 +0000 UTC m=+48.283786819" watchObservedRunningTime="2025-12-02 10:02:35.913106135 +0000 UTC m=+48.284880289" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.967003 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sxwtf" podStartSLOduration=27.966988121 podStartE2EDuration="27.966988121s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:35.961864072 +0000 UTC m=+48.333638226" watchObservedRunningTime="2025-12-02 10:02:35.966988121 +0000 UTC m=+48.338762275" Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 
10:02:35.968957 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:35 crc kubenswrapper[4685]: E1202 10:02:35.969325 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.469311795 +0000 UTC m=+48.841085949 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:35 crc kubenswrapper[4685]: I1202 10:02:35.993680 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-cm2tb" podStartSLOduration=27.993662347 podStartE2EDuration="27.993662347s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:35.991467888 +0000 UTC m=+48.363242042" watchObservedRunningTime="2025-12-02 10:02:35.993662347 +0000 UTC m=+48.365436511" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.004086 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.010436 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6kndg"] Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.018551 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-96v4q"] Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.019635 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.025609 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.070694 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:36 crc kubenswrapper[4685]: E1202 10:02:36.071131 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.571111265 +0000 UTC m=+48.942885419 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.109024 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-96v4q"] Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.166372 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-5glrx" podStartSLOduration=28.166355037 podStartE2EDuration="28.166355037s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:36.160326823 +0000 UTC m=+48.532100977" watchObservedRunningTime="2025-12-02 10:02:36.166355037 +0000 UTC m=+48.538129191" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.176317 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-catalog-content\") pod \"certified-operators-96v4q\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.176359 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-utilities\") pod \"certified-operators-96v4q\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.176387 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5l4l\" (UniqueName: \"kubernetes.io/projected/2c40690c-d875-4eef-9c0c-0a174aa43ca8-kube-api-access-z5l4l\") pod \"certified-operators-96v4q\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.176433 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:36 crc kubenswrapper[4685]: E1202 10:02:36.176724 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.67671288 +0000 UTC m=+49.048487034 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.239662 4685 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-mkjcc container/packageserver namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.239735 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" podUID="ae0fbaf9-ce34-4978-8d6b-6f40e1597c91" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.278178 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.278345 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-catalog-content\") pod \"certified-operators-96v4q\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.278377 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-utilities\") pod \"certified-operators-96v4q\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.278412 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5l4l\" (UniqueName: \"kubernetes.io/projected/2c40690c-d875-4eef-9c0c-0a174aa43ca8-kube-api-access-z5l4l\") pod \"certified-operators-96v4q\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: E1202 10:02:36.278780 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.778760677 +0000 UTC m=+49.150534841 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.279653 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-catalog-content\") pod \"certified-operators-96v4q\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.279882 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-utilities\") pod \"certified-operators-96v4q\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.317511 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5l4l\" (UniqueName: \"kubernetes.io/projected/2c40690c-d875-4eef-9c0c-0a174aa43ca8-kube-api-access-z5l4l\") pod \"certified-operators-96v4q\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.346265 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.379355 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:36 crc kubenswrapper[4685]: E1202 10:02:36.379682 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.879670453 +0000 UTC m=+49.251444607 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.390421 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-vwpld" podStartSLOduration=15.390403315 podStartE2EDuration="15.390403315s" podCreationTimestamp="2025-12-02 10:02:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:36.316919856 +0000 UTC m=+48.688694020" watchObservedRunningTime="2025-12-02 10:02:36.390403315 +0000 UTC m=+48.762177469" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.431024 4685 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-mkjcc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.431065 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" podUID="ae0fbaf9-ce34-4978-8d6b-6f40e1597c91" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.431782 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-6mj6f" podStartSLOduration=29.431773271 podStartE2EDuration="29.431773271s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:36.391800473 +0000 UTC m=+48.763574627" watchObservedRunningTime="2025-12-02 10:02:36.431773271 +0000 UTC m=+48.803547425" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.432023 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-p94sb"] Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.432898 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.447169 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:36 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:36 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:36 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.447225 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.447613 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-rs84c" podStartSLOduration=29.447595891 podStartE2EDuration="29.447595891s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:36.447504179 +0000 UTC m=+48.819278333" watchObservedRunningTime="2025-12-02 10:02:36.447595891 +0000 UTC m=+48.819370065" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.480265 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:36 crc kubenswrapper[4685]: E1202 10:02:36.480702 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:36.980687172 +0000 UTC m=+49.352461326 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.525005 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p94sb"] Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.530320 4685 generic.go:334] "Generic (PLEG): container finished" podID="7def2210-de93-4381-84dd-fe0d507f76b3" containerID="726fbe8b86c5da91b6b34c6facc006767c92adcaa568cd3d60030a53a1f4d9dd" exitCode=0 Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.530365 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" event={"ID":"7def2210-de93-4381-84dd-fe0d507f76b3","Type":"ContainerDied","Data":"726fbe8b86c5da91b6b34c6facc006767c92adcaa568cd3d60030a53a1f4d9dd"} Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.542135 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" event={"ID":"6651d927-5e58-43b0-8c92-f425f6145e31","Type":"ContainerStarted","Data":"dde0ec1f63a609dcecfb40f3c722e0bbe1cb843e7a0aba99fed1c540f2605186"} Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.543172 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kndg" event={"ID":"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017","Type":"ContainerStarted","Data":"45be23419db0db4e27fd1b3cbcff5b1e08ac822306b5bb672f5daa412013646f"} Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.586926 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qtk7\" (UniqueName: \"kubernetes.io/projected/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-kube-api-access-2qtk7\") pod \"certified-operators-p94sb\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.586980 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.587033 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-utilities\") pod \"certified-operators-p94sb\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.587059 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-catalog-content\") pod \"certified-operators-p94sb\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " 
pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: E1202 10:02:36.587987 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:37.087976902 +0000 UTC m=+49.459751056 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.598092 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-28vxg" event={"ID":"c69d86c9-b765-4489-ab56-d93126192812","Type":"ContainerStarted","Data":"ae4a44da146adf22433e26f038516628ed8ae7c578ef824b4a9c27972c6de1e4"} Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.605209 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mkjcc" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.654172 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-drdhq" podStartSLOduration=28.654157244 podStartE2EDuration="28.654157244s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:36.652245481 +0000 UTC m=+49.024019635" watchObservedRunningTime="2025-12-02 10:02:36.654157244 +0000 UTC m=+49.025931398" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.688428 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.688753 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qtk7\" (UniqueName: \"kubernetes.io/projected/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-kube-api-access-2qtk7\") pod \"certified-operators-p94sb\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.688840 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-utilities\") pod \"certified-operators-p94sb\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.688913 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-catalog-content\") pod \"certified-operators-p94sb\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " 
pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.689282 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-catalog-content\") pod \"certified-operators-p94sb\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: E1202 10:02:36.689343 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:37.189328391 +0000 UTC m=+49.561102545 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.691098 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-utilities\") pod \"certified-operators-p94sb\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.792289 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:36 crc kubenswrapper[4685]: E1202 10:02:36.792664 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:37.292643414 +0000 UTC m=+49.664417568 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.827298 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qtk7\" (UniqueName: \"kubernetes.io/projected/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-kube-api-access-2qtk7\") pod \"certified-operators-p94sb\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.830222 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.893233 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:36 crc kubenswrapper[4685]: E1202 10:02:36.893660 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:37.393640082 +0000 UTC m=+49.765414246 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.920987 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-kj84m" podStartSLOduration=28.920966505 podStartE2EDuration="28.920966505s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:36.827725308 +0000 UTC m=+49.199499462" watchObservedRunningTime="2025-12-02 10:02:36.920966505 +0000 UTC m=+49.292740659" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.928454 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" podStartSLOduration=29.928433959 podStartE2EDuration="29.928433959s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:36.922622391 +0000 UTC m=+49.294396555" watchObservedRunningTime="2025-12-02 10:02:36.928433959 +0000 UTC m=+49.300208113" Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.929436 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.949420 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-96htj"] Dec 02 10:02:36 crc kubenswrapper[4685]: I1202 10:02:36.949654 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" containerName="kube-multus-additional-cni-plugins" containerID="cri-o://f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" gracePeriod=30 Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:36.998645 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:36.998948 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:37.498937768 +0000 UTC m=+49.870711922 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.034369 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" podStartSLOduration=30.034353782 podStartE2EDuration="30.034353782s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:36.976131437 +0000 UTC m=+49.347905591" watchObservedRunningTime="2025-12-02 10:02:37.034353782 +0000 UTC m=+49.406127936" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.067904 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rtwwd" podStartSLOduration=29.067886005 podStartE2EDuration="29.067886005s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:37.03685038 +0000 UTC m=+49.408624534" watchObservedRunningTime="2025-12-02 10:02:37.067886005 +0000 UTC m=+49.439660159" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.100049 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:37.100431 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:37.600415959 +0000 UTC m=+49.972190113 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.110589 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-jmz9s" podStartSLOduration=30.110551696 podStartE2EDuration="30.110551696s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:37.070157267 +0000 UTC m=+49.441931421" watchObservedRunningTime="2025-12-02 10:02:37.110551696 +0000 UTC m=+49.482325850" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.190303 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" podStartSLOduration=29.190286635 podStartE2EDuration="29.190286635s" podCreationTimestamp="2025-12-02 10:02:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:37.187874461 +0000 UTC m=+49.559648615" watchObservedRunningTime="2025-12-02 10:02:37.190286635 +0000 UTC m=+49.562060789" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.206227 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:37.206528 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:37.706516167 +0000 UTC m=+50.078290321 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.299807 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8vsrk"] Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.309020 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:37.309642 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:37.809624144 +0000 UTC m=+50.181398298 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: W1202 10:02:37.316001 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod780a97cc_9fca_4b67_a956_8be8a6eb3d08.slice/crio-6f64821112ec03f11188b66586fee1b7b2a3a5fa246c4528d3ce8f6bc3ebe2ef WatchSource:0}: Error finding container 6f64821112ec03f11188b66586fee1b7b2a3a5fa246c4528d3ce8f6bc3ebe2ef: Status 404 returned error can't find the container with id 6f64821112ec03f11188b66586fee1b7b2a3a5fa246c4528d3ce8f6bc3ebe2ef Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.410624 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:37.410922 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:37.91091044 +0000 UTC m=+50.282684594 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.464748 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:37 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:37 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:37 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.464797 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.512049 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:37.512516 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.012502385 +0000 UTC m=+50.384276539 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.565240 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-96v4q"] Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.610122 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" podStartSLOduration=30.610103402 podStartE2EDuration="30.610103402s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:37.593601192 +0000 UTC m=+49.965375356" watchObservedRunningTime="2025-12-02 10:02:37.610103402 +0000 UTC m=+49.981877556" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.614391 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:37.617784 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.11775744 +0000 UTC m=+50.489531594 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.636219 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8vsrk" event={"ID":"780a97cc-9fca-4b67-a956-8be8a6eb3d08","Type":"ContainerStarted","Data":"6f64821112ec03f11188b66586fee1b7b2a3a5fa246c4528d3ce8f6bc3ebe2ef"} Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.637963 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zj8bb"] Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.639244 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.646317 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kndg" event={"ID":"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017","Type":"ContainerDied","Data":"87bebf18c72969c2a6510f94fe9950ee5d50af692f0fbf02f9cc037e8fbe7712"} Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.646264 4685 generic.go:334] "Generic (PLEG): container finished" podID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerID="87bebf18c72969c2a6510f94fe9950ee5d50af692f0fbf02f9cc037e8fbe7712" exitCode=0 Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.647172 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.650440 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zj8bb"] Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.656722 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5c2625f0-cd02-453e-bd74-8a7ba8937ea7","Type":"ContainerStarted","Data":"e9ff156120185c7736e2de9b5986b22128a5091f115f5974078c292869195659"} Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.670685 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.675472 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-28vxg" event={"ID":"c69d86c9-b765-4489-ab56-d93126192812","Type":"ContainerStarted","Data":"ffaad089ff1d71ba78a6df1ce85a08c84d12878d95b8e4c91349e0248408569d"} Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.687855 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96v4q" event={"ID":"2c40690c-d875-4eef-9c0c-0a174aa43ca8","Type":"ContainerStarted","Data":"d2ab7d04700ce9e186ee1d56614b7c2db072203ae041bf62f324a0446b34323a"} Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:37.721410 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.221391651 +0000 UTC m=+50.593165805 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.721448 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.721599 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-utilities\") pod \"redhat-marketplace-zj8bb\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.721668 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.721699 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsk4t\" (UniqueName: \"kubernetes.io/projected/4eddbb2b-7286-4117-97fd-f915638d19cd-kube-api-access-hsk4t\") pod \"redhat-marketplace-zj8bb\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.721723 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-catalog-content\") pod \"redhat-marketplace-zj8bb\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:37.722464 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.222456909 +0000 UTC m=+50.594231063 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.822771 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.823090 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsk4t\" (UniqueName: \"kubernetes.io/projected/4eddbb2b-7286-4117-97fd-f915638d19cd-kube-api-access-hsk4t\") pod \"redhat-marketplace-zj8bb\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.823128 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-catalog-content\") pod \"redhat-marketplace-zj8bb\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.823192 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-utilities\") pod \"redhat-marketplace-zj8bb\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:37.823967 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.323952593 +0000 UTC m=+50.695726737 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.824635 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-catalog-content\") pod \"redhat-marketplace-zj8bb\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.824943 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-utilities\") pod \"redhat-marketplace-zj8bb\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.864953 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsk4t\" (UniqueName: \"kubernetes.io/projected/4eddbb2b-7286-4117-97fd-f915638d19cd-kube-api-access-hsk4t\") pod \"redhat-marketplace-zj8bb\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.926325 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p94sb"] Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.926828 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:37 crc kubenswrapper[4685]: E1202 10:02:37.927101 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.427089949 +0000 UTC m=+50.798864103 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:37 crc kubenswrapper[4685]: I1202 10:02:37.979689 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.005749 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xd9bz"] Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.006914 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.036194 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.036509 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.536494098 +0000 UTC m=+50.908268252 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.040833 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xd9bz"] Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.054818 4685 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.138365 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8gtg\" (UniqueName: \"kubernetes.io/projected/05691e86-e66f-45a4-91cd-eb1045b4a56e-kube-api-access-q8gtg\") pod \"redhat-marketplace-xd9bz\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.138411 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-catalog-content\") pod \"redhat-marketplace-xd9bz\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.138474 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.138516 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-utilities\") pod \"redhat-marketplace-xd9bz\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.138901 4685 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.638886184 +0000 UTC m=+51.010660338 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.239204 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.239449 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.73941667 +0000 UTC m=+51.111190834 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.239632 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-utilities\") pod \"redhat-marketplace-xd9bz\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.240087 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-utilities\") pod \"redhat-marketplace-xd9bz\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.240194 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8gtg\" (UniqueName: \"kubernetes.io/projected/05691e86-e66f-45a4-91cd-eb1045b4a56e-kube-api-access-q8gtg\") pod \"redhat-marketplace-xd9bz\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.240215 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-catalog-content\") pod \"redhat-marketplace-xd9bz\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 
10:02:38.240270 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.240658 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.740636303 +0000 UTC m=+51.112410457 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.240919 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-catalog-content\") pod \"redhat-marketplace-xd9bz\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.312863 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8gtg\" (UniqueName: \"kubernetes.io/projected/05691e86-e66f-45a4-91cd-eb1045b4a56e-kube-api-access-q8gtg\") pod \"redhat-marketplace-xd9bz\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.334165 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.339638 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.342925 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.343422 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.84340327 +0000 UTC m=+51.215177424 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.358515 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-fdsrt" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.431337 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:38 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:38 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:38 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.431397 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.444153 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrqt4\" (UniqueName: \"kubernetes.io/projected/7def2210-de93-4381-84dd-fe0d507f76b3-kube-api-access-rrqt4\") pod \"7def2210-de93-4381-84dd-fe0d507f76b3\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.444204 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7def2210-de93-4381-84dd-fe0d507f76b3-config-volume\") pod \"7def2210-de93-4381-84dd-fe0d507f76b3\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.444240 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7def2210-de93-4381-84dd-fe0d507f76b3-secret-volume\") pod \"7def2210-de93-4381-84dd-fe0d507f76b3\" (UID: \"7def2210-de93-4381-84dd-fe0d507f76b3\") " Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.444368 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.446072 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:38.946057575 +0000 UTC m=+51.317831729 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.446107 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7def2210-de93-4381-84dd-fe0d507f76b3-config-volume" (OuterVolumeSpecName: "config-volume") pod "7def2210-de93-4381-84dd-fe0d507f76b3" (UID: "7def2210-de93-4381-84dd-fe0d507f76b3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.449049 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7def2210-de93-4381-84dd-fe0d507f76b3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7def2210-de93-4381-84dd-fe0d507f76b3" (UID: "7def2210-de93-4381-84dd-fe0d507f76b3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.449253 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7def2210-de93-4381-84dd-fe0d507f76b3-kube-api-access-rrqt4" (OuterVolumeSpecName: "kube-api-access-rrqt4") pod "7def2210-de93-4381-84dd-fe0d507f76b3" (UID: "7def2210-de93-4381-84dd-fe0d507f76b3"). InnerVolumeSpecName "kube-api-access-rrqt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.533195 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.533580 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.545000 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zj8bb"] Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.545455 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.545686 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrqt4\" (UniqueName: \"kubernetes.io/projected/7def2210-de93-4381-84dd-fe0d507f76b3-kube-api-access-rrqt4\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.545698 4685 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7def2210-de93-4381-84dd-fe0d507f76b3-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.545707 4685 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7def2210-de93-4381-84dd-fe0d507f76b3-secret-volume\") on node \"crc\" DevicePath 
\"\"" Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.545761 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:39.045747187 +0000 UTC m=+51.417521331 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.565581 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.609155 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.609672 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.646779 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.649748 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:39.149732617 +0000 UTC m=+51.521506771 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.709458 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.709681 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7def2210-de93-4381-84dd-fe0d507f76b3" containerName="collect-profiles" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.709694 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="7def2210-de93-4381-84dd-fe0d507f76b3" containerName="collect-profiles" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.709785 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="7def2210-de93-4381-84dd-fe0d507f76b3" containerName="collect-profiles" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.710251 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.714951 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.715158 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.729952 4685 generic.go:334] "Generic (PLEG): container finished" podID="5c2625f0-cd02-453e-bd74-8a7ba8937ea7" containerID="7790a7842ff9b0efc8e0d3b3bb0b68dbe34072af66633e90fa6b0bb739493abf" exitCode=0 Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.730031 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5c2625f0-cd02-453e-bd74-8a7ba8937ea7","Type":"ContainerDied","Data":"7790a7842ff9b0efc8e0d3b3bb0b68dbe34072af66633e90fa6b0bb739493abf"} Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.739392 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.748039 4685 generic.go:334] "Generic (PLEG): container finished" podID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerID="823a1d8e2e1ccbfe50f1304c63c926cb87b26fa021f0ef627bdd15715f1a25be" exitCode=0 Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.748162 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p94sb" event={"ID":"6418701b-a844-4b1c-8b5e-07e1b8a3faf6","Type":"ContainerDied","Data":"823a1d8e2e1ccbfe50f1304c63c926cb87b26fa021f0ef627bdd15715f1a25be"} Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.748190 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p94sb" event={"ID":"6418701b-a844-4b1c-8b5e-07e1b8a3faf6","Type":"ContainerStarted","Data":"afe1477af027b1e1b06ce152abacd2f9e2eee8a8ffc630bce407221c912fc8ca"} Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.749282 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.749691 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.749719 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.749833 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:39.249815251 +0000 UTC m=+51.621589405 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.768806 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj8bb" event={"ID":"4eddbb2b-7286-4117-97fd-f915638d19cd","Type":"ContainerStarted","Data":"6b435c3064fd0cf9692d5df5d2a3871aebac2752e08f64522c3dc35f40b8d8dd"} Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.792732 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-28vxg" event={"ID":"c69d86c9-b765-4489-ab56-d93126192812","Type":"ContainerStarted","Data":"a140612838cb00b6f1f10dc657803c8d48f96e7400056ad4d3c3c0cb6120419f"} Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.815172 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.816174 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76" event={"ID":"7def2210-de93-4381-84dd-fe0d507f76b3","Type":"ContainerDied","Data":"48d2a10a362b6f950d0611efd9be893d144d58ed12bf4098e84ac5e77806969d"} Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.816231 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48d2a10a362b6f950d0611efd9be893d144d58ed12bf4098e84ac5e77806969d" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.818192 4685 generic.go:334] "Generic (PLEG): container finished" podID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerID="0ce8d400793a01179fed2da6b9dffc0fc6eb3e6ea798837bbf0965e82c6e335c" exitCode=0 Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.818296 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96v4q" event={"ID":"2c40690c-d875-4eef-9c0c-0a174aa43ca8","Type":"ContainerDied","Data":"0ce8d400793a01179fed2da6b9dffc0fc6eb3e6ea798837bbf0965e82c6e335c"} Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.821216 4685 generic.go:334] "Generic (PLEG): container finished" podID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerID="9d0c6151479033c121164abfa2b0c3e31fcf7ac291cd78d6ffd9c1bf937735d5" exitCode=0 Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.827441 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8vsrk" event={"ID":"780a97cc-9fca-4b67-a956-8be8a6eb3d08","Type":"ContainerDied","Data":"9d0c6151479033c121164abfa2b0c3e31fcf7ac291cd78d6ffd9c1bf937735d5"} Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.838353 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lf7qs"] Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.839332 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-28vxg" podStartSLOduration=17.839309627 podStartE2EDuration="17.839309627s" podCreationTimestamp="2025-12-02 10:02:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:38.830399345 +0000 UTC m=+51.202173519" watchObservedRunningTime="2025-12-02 10:02:38.839309627 +0000 UTC m=+51.211083781" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.839423 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.841967 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.843511 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-z9vhz" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.848105 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lf7qs"] Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.852014 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.852056 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.852099 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.861636 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.869332 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-02 10:02:39.369317004 +0000 UTC m=+51.741091158 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-fcrk7" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.912672 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xd9bz"] Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.947232 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.996195 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.996448 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-catalog-content\") pod \"redhat-operators-lf7qs\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.996476 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-utilities\") pod \"redhat-operators-lf7qs\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.996510 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4pwf\" (UniqueName: \"kubernetes.io/projected/940db863-41cb-461b-ab9f-80663af52acf-kube-api-access-c4pwf\") pod \"redhat-operators-lf7qs\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:38 crc kubenswrapper[4685]: I1202 10:02:38.996637 4685 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-02T10:02:38.054844597Z","Handler":null,"Name":""} Dec 02 10:02:38 crc kubenswrapper[4685]: E1202 10:02:38.996854 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-02 10:02:39.496837304 +0000 UTC m=+51.868611458 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.024306 4685 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.024342 4685 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.066128 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.097707 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-catalog-content\") pod \"redhat-operators-lf7qs\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.097754 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-utilities\") pod \"redhat-operators-lf7qs\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.097788 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4pwf\" (UniqueName: \"kubernetes.io/projected/940db863-41cb-461b-ab9f-80663af52acf-kube-api-access-c4pwf\") pod \"redhat-operators-lf7qs\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.097841 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.098713 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-catalog-content\") pod \"redhat-operators-lf7qs\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.099276 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-utilities\") pod \"redhat-operators-lf7qs\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:39 crc kubenswrapper[4685]: 
I1202 10:02:39.136769 4685 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.136815 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.150738 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4pwf\" (UniqueName: \"kubernetes.io/projected/940db863-41cb-461b-ab9f-80663af52acf-kube-api-access-c4pwf\") pod \"redhat-operators-lf7qs\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.183647 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.210151 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rp5qc"] Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.211830 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.235815 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rp5qc"] Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.264289 4685 patch_prober.go:28] interesting pod/apiserver-76f77b778f-ksvk4 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]log ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]etcd ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]poststarthook/generic-apiserver-start-informers ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]poststarthook/max-in-flight-filter ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 02 10:02:39 crc kubenswrapper[4685]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 02 10:02:39 crc kubenswrapper[4685]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 02 10:02:39 crc kubenswrapper[4685]: [+]poststarthook/project.openshift.io-projectcache ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]poststarthook/openshift.io-startinformers ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 02 10:02:39 crc kubenswrapper[4685]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 02 10:02:39 crc kubenswrapper[4685]: livez check 
failed Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.264609 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" podUID="6651d927-5e58-43b0-8c92-f425f6145e31" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.291001 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-fcrk7\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.301350 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.301652 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-utilities\") pod \"redhat-operators-rp5qc\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.302549 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-catalog-content\") pod \"redhat-operators-rp5qc\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.302628 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bznjs\" (UniqueName: \"kubernetes.io/projected/9ea11cb0-f32a-4346-b195-ce6c7176553b-kube-api-access-bznjs\") pod \"redhat-operators-rp5qc\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.311163 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.411309 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-catalog-content\") pod \"redhat-operators-rp5qc\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.411396 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bznjs\" (UniqueName: \"kubernetes.io/projected/9ea11cb0-f32a-4346-b195-ce6c7176553b-kube-api-access-bznjs\") pod \"redhat-operators-rp5qc\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.411450 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-utilities\") pod \"redhat-operators-rp5qc\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.412327 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-utilities\") pod \"redhat-operators-rp5qc\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.412440 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-catalog-content\") pod \"redhat-operators-rp5qc\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.426950 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:39 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:39 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:39 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.427015 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.427139 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.430545 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.435293 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bznjs\" (UniqueName: \"kubernetes.io/projected/9ea11cb0-f32a-4346-b195-ce6c7176553b-kube-api-access-bznjs\") pod \"redhat-operators-rp5qc\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.543168 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.634184 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lf7qs"] Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.832576 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf7qs" event={"ID":"940db863-41cb-461b-ab9f-80663af52acf","Type":"ContainerStarted","Data":"5820c520e2e13fc5a3339073c0300ea46fad6ade6f219917350a0a997e7e0a52"} Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.835158 4685 generic.go:334] "Generic (PLEG): container finished" podID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerID="e39a50d44849a14dd8a37945cb8cc0a9156180a7b5368e2d9f96a82c0218dd9b" exitCode=0 Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.835327 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj8bb" event={"ID":"4eddbb2b-7286-4117-97fd-f915638d19cd","Type":"ContainerDied","Data":"e39a50d44849a14dd8a37945cb8cc0a9156180a7b5368e2d9f96a82c0218dd9b"} Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.839813 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac","Type":"ContainerStarted","Data":"3a2db3b7d3e01bb730fa26ef70bf8f0272742e2a89e64a11b7476585ef499408"} Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.863447 4685 generic.go:334] "Generic (PLEG): container finished" podID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerID="115fae1201bf7b97942cf4476480f2dc7dea036d92368b63640550eac7c33b5a" exitCode=0 Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.863854 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xd9bz" event={"ID":"05691e86-e66f-45a4-91cd-eb1045b4a56e","Type":"ContainerDied","Data":"115fae1201bf7b97942cf4476480f2dc7dea036d92368b63640550eac7c33b5a"} Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.863908 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xd9bz" event={"ID":"05691e86-e66f-45a4-91cd-eb1045b4a56e","Type":"ContainerStarted","Data":"689558aac1a999b0e2fb0d997be7de2f440c76a0dd88d70594053e06a6fc0cb6"} Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.934843 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 02 10:02:39 crc kubenswrapper[4685]: I1202 10:02:39.939840 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fcrk7"] Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.230892 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.317316 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rp5qc"] Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.336916 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kubelet-dir\") pod \"5c2625f0-cd02-453e-bd74-8a7ba8937ea7\" (UID: \"5c2625f0-cd02-453e-bd74-8a7ba8937ea7\") " Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.337061 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kube-api-access\") pod \"5c2625f0-cd02-453e-bd74-8a7ba8937ea7\" (UID: \"5c2625f0-cd02-453e-bd74-8a7ba8937ea7\") " Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.337812 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5c2625f0-cd02-453e-bd74-8a7ba8937ea7" (UID: "5c2625f0-cd02-453e-bd74-8a7ba8937ea7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:02:40 crc kubenswrapper[4685]: W1202 10:02:40.338825 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ea11cb0_f32a_4346_b195_ce6c7176553b.slice/crio-cb7a54c6352f56cc7adf10051a337e70ce3f999ba2f2144295de213f9cf0788d WatchSource:0}: Error finding container cb7a54c6352f56cc7adf10051a337e70ce3f999ba2f2144295de213f9cf0788d: Status 404 returned error can't find the container with id cb7a54c6352f56cc7adf10051a337e70ce3f999ba2f2144295de213f9cf0788d Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.345732 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5c2625f0-cd02-453e-bd74-8a7ba8937ea7" (UID: "5c2625f0-cd02-453e-bd74-8a7ba8937ea7"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.432593 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:40 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:40 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:40 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.432692 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.438297 4685 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.438325 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5c2625f0-cd02-453e-bd74-8a7ba8937ea7-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.512751 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.530704 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.869872 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rp5qc" event={"ID":"9ea11cb0-f32a-4346-b195-ce6c7176553b","Type":"ContainerStarted","Data":"cb7a54c6352f56cc7adf10051a337e70ce3f999ba2f2144295de213f9cf0788d"} Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.873192 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" event={"ID":"8a4fad5a-444e-4161-b3df-a473b62dca2d","Type":"ContainerStarted","Data":"e9225d0fda879278b5d33be4b47547e4458c195c2b69ba4585d0a4cfaa9c6a94"} Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.875878 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5c2625f0-cd02-453e-bd74-8a7ba8937ea7","Type":"ContainerDied","Data":"e9ff156120185c7736e2de9b5986b22128a5091f115f5974078c292869195659"} Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.875906 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9ff156120185c7736e2de9b5986b22128a5091f115f5974078c292869195659" Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.875965 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 02 10:02:40 crc kubenswrapper[4685]: I1202 10:02:40.906059 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=0.906034666 podStartE2EDuration="906.034666ms" podCreationTimestamp="2025-12-02 10:02:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:40.900840885 +0000 UTC m=+53.272615039" watchObservedRunningTime="2025-12-02 10:02:40.906034666 +0000 UTC m=+53.277808840" Dec 02 10:02:41 crc kubenswrapper[4685]: I1202 10:02:41.427285 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:41 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:41 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:41 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:41 crc kubenswrapper[4685]: I1202 10:02:41.427339 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:41 crc kubenswrapper[4685]: I1202 10:02:41.883219 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf7qs" event={"ID":"940db863-41cb-461b-ab9f-80663af52acf","Type":"ContainerStarted","Data":"1d75db56749234660a77fb3dfc7a3419fc1d7565fcc53dc190bc1b154a9b9893"} Dec 02 10:02:42 crc kubenswrapper[4685]: I1202 10:02:42.427977 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:42 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:42 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:42 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:42 crc kubenswrapper[4685]: I1202 10:02:42.428045 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:42 crc kubenswrapper[4685]: I1202 10:02:42.831165 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-vwpld" Dec 02 10:02:42 crc kubenswrapper[4685]: I1202 10:02:42.909001 4685 generic.go:334] "Generic (PLEG): container finished" podID="940db863-41cb-461b-ab9f-80663af52acf" containerID="1d75db56749234660a77fb3dfc7a3419fc1d7565fcc53dc190bc1b154a9b9893" exitCode=0 Dec 02 10:02:42 crc kubenswrapper[4685]: I1202 10:02:42.909211 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf7qs" event={"ID":"940db863-41cb-461b-ab9f-80663af52acf","Type":"ContainerDied","Data":"1d75db56749234660a77fb3dfc7a3419fc1d7565fcc53dc190bc1b154a9b9893"} Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.427124 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router 
namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:43 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:43 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:43 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.427204 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.607541 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.613299 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-ksvk4" Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.703318 4685 patch_prober.go:28] interesting pod/downloads-7954f5f757-m7vnk container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.703373 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-m7vnk" podUID="60826f81-d0cb-4339-83b0-d40a5df1aff8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.703402 4685 patch_prober.go:28] interesting pod/downloads-7954f5f757-m7vnk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.703456 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-m7vnk" podUID="60826f81-d0cb-4339-83b0-d40a5df1aff8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.720801 4685 patch_prober.go:28] interesting pod/console-f9d7485db-hgnrz container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.720856 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-hgnrz" podUID="4e159366-bd40-43f3-9b3b-c818913c957a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 02 10:02:43 crc kubenswrapper[4685]: I1202 10:02:43.926093 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac","Type":"ContainerStarted","Data":"b9f14f6d8110cd6877804b003516344ad99f1ff36c70fc98991f93cba97a30a0"} Dec 02 10:02:44 crc kubenswrapper[4685]: I1202 10:02:44.425746 4685 patch_prober.go:28] 
interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:44 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:44 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:44 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:44 crc kubenswrapper[4685]: I1202 10:02:44.425835 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:44 crc kubenswrapper[4685]: E1202 10:02:44.843020 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:02:44 crc kubenswrapper[4685]: E1202 10:02:44.845406 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:02:44 crc kubenswrapper[4685]: E1202 10:02:44.847538 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:02:44 crc kubenswrapper[4685]: E1202 10:02:44.847690 4685 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" containerName="kube-multus-additional-cni-plugins" Dec 02 10:02:44 crc kubenswrapper[4685]: I1202 10:02:44.934770 4685 generic.go:334] "Generic (PLEG): container finished" podID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerID="7310837e9b85ff4292c2016b4986172fe6f1a52bc0587f113a16e6159adb31e7" exitCode=0 Dec 02 10:02:44 crc kubenswrapper[4685]: I1202 10:02:44.934852 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rp5qc" event={"ID":"9ea11cb0-f32a-4346-b195-ce6c7176553b","Type":"ContainerDied","Data":"7310837e9b85ff4292c2016b4986172fe6f1a52bc0587f113a16e6159adb31e7"} Dec 02 10:02:44 crc kubenswrapper[4685]: I1202 10:02:44.936649 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" event={"ID":"8a4fad5a-444e-4161-b3df-a473b62dca2d","Type":"ContainerStarted","Data":"d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915"} Dec 02 10:02:44 crc kubenswrapper[4685]: I1202 10:02:44.997639 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:02:45 crc kubenswrapper[4685]: I1202 10:02:45.425640 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 
container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:45 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:45 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:45 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:45 crc kubenswrapper[4685]: I1202 10:02:45.425692 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:46 crc kubenswrapper[4685]: I1202 10:02:46.425196 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:46 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:46 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:46 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:46 crc kubenswrapper[4685]: I1202 10:02:46.425251 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:46 crc kubenswrapper[4685]: I1202 10:02:46.953131 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:02:46 crc kubenswrapper[4685]: I1202 10:02:46.979876 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" podStartSLOduration=39.979859784 podStartE2EDuration="39.979859784s" podCreationTimestamp="2025-12-02 10:02:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:46.975169676 +0000 UTC m=+59.346943820" watchObservedRunningTime="2025-12-02 10:02:46.979859784 +0000 UTC m=+59.351633938" Dec 02 10:02:47 crc kubenswrapper[4685]: I1202 10:02:47.426212 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:47 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:47 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:47 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:47 crc kubenswrapper[4685]: I1202 10:02:47.426466 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:47 crc kubenswrapper[4685]: I1202 10:02:47.968628 4685 generic.go:334] "Generic (PLEG): container finished" podID="e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac" containerID="b9f14f6d8110cd6877804b003516344ad99f1ff36c70fc98991f93cba97a30a0" exitCode=0 Dec 02 10:02:47 crc kubenswrapper[4685]: I1202 10:02:47.969372 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac","Type":"ContainerDied","Data":"b9f14f6d8110cd6877804b003516344ad99f1ff36c70fc98991f93cba97a30a0"} Dec 02 10:02:48 crc kubenswrapper[4685]: I1202 10:02:48.427069 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:48 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:48 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:48 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:48 crc kubenswrapper[4685]: I1202 10:02:48.427438 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:49 crc kubenswrapper[4685]: I1202 10:02:49.425646 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:49 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:49 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:49 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:49 crc kubenswrapper[4685]: I1202 10:02:49.426003 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:50 crc kubenswrapper[4685]: I1202 10:02:50.425261 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:50 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:50 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:50 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:50 crc kubenswrapper[4685]: I1202 10:02:50.425322 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:51 crc kubenswrapper[4685]: I1202 10:02:51.426379 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:51 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:51 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:51 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:51 crc kubenswrapper[4685]: I1202 10:02:51.426500 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:52 crc 
kubenswrapper[4685]: I1202 10:02:52.426357 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:52 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:52 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:52 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:52 crc kubenswrapper[4685]: I1202 10:02:52.426799 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:52 crc kubenswrapper[4685]: I1202 10:02:52.913925 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 02 10:02:53 crc kubenswrapper[4685]: I1202 10:02:53.426215 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:53 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:53 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:53 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:53 crc kubenswrapper[4685]: I1202 10:02:53.426285 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:53 crc kubenswrapper[4685]: I1202 10:02:53.708450 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-m7vnk" Dec 02 10:02:53 crc kubenswrapper[4685]: I1202 10:02:53.721398 4685 patch_prober.go:28] interesting pod/console-f9d7485db-hgnrz container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 02 10:02:53 crc kubenswrapper[4685]: I1202 10:02:53.721841 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-hgnrz" podUID="4e159366-bd40-43f3-9b3b-c818913c957a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 02 10:02:53 crc kubenswrapper[4685]: I1202 10:02:53.730777 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=1.73076111 podStartE2EDuration="1.73076111s" podCreationTimestamp="2025-12-02 10:02:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:02:53.729633599 +0000 UTC m=+66.101407773" watchObservedRunningTime="2025-12-02 10:02:53.73076111 +0000 UTC m=+66.102535274" Dec 02 10:02:54 crc kubenswrapper[4685]: I1202 10:02:54.425132 4685 patch_prober.go:28] interesting pod/router-default-5444994796-tkpq4 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" 
start-of-body=[-]backend-http failed: reason withheld Dec 02 10:02:54 crc kubenswrapper[4685]: [-]has-synced failed: reason withheld Dec 02 10:02:54 crc kubenswrapper[4685]: [+]process-running ok Dec 02 10:02:54 crc kubenswrapper[4685]: healthz check failed Dec 02 10:02:54 crc kubenswrapper[4685]: I1202 10:02:54.425496 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-tkpq4" podUID="096d8145-ede4-4af3-a326-7a1739ca1dc4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 02 10:02:54 crc kubenswrapper[4685]: I1202 10:02:54.503766 4685 patch_prober.go:28] interesting pod/console-operator-58897d9998-mfk9b container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 10:02:54 crc kubenswrapper[4685]: I1202 10:02:54.504298 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-mfk9b" podUID="492f18b2-8ffc-4f93-a2de-6bf204e1a4f3" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 10:02:54 crc kubenswrapper[4685]: I1202 10:02:54.503808 4685 patch_prober.go:28] interesting pod/console-operator-58897d9998-mfk9b container/console-operator namespace/openshift-console-operator: Liveness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 10:02:54 crc kubenswrapper[4685]: I1202 10:02:54.504514 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console-operator/console-operator-58897d9998-mfk9b" podUID="492f18b2-8ffc-4f93-a2de-6bf204e1a4f3" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 10:02:54 crc kubenswrapper[4685]: E1202 10:02:54.840906 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:02:54 crc kubenswrapper[4685]: E1202 10:02:54.843850 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:02:54 crc kubenswrapper[4685]: E1202 10:02:54.845536 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:02:54 crc kubenswrapper[4685]: E1202 10:02:54.845642 4685 prober.go:104] "Probe errored" err="rpc error: code = Unknown 
desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" containerName="kube-multus-additional-cni-plugins" Dec 02 10:02:55 crc kubenswrapper[4685]: I1202 10:02:55.429904 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:55 crc kubenswrapper[4685]: I1202 10:02:55.432843 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-tkpq4" Dec 02 10:02:56 crc kubenswrapper[4685]: I1202 10:02:56.038087 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-665b6dd947-t84dj_12f6ce83-cb37-4e64-92c4-fb96aea3b213/cluster-samples-operator/0.log" Dec 02 10:02:56 crc kubenswrapper[4685]: I1202 10:02:56.038329 4685 generic.go:334] "Generic (PLEG): container finished" podID="12f6ce83-cb37-4e64-92c4-fb96aea3b213" containerID="1cea13510c9034a5f4eb0f438857b548939c8c03d1bf9c95d0989122af9b8e22" exitCode=2 Dec 02 10:02:56 crc kubenswrapper[4685]: I1202 10:02:56.038425 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" event={"ID":"12f6ce83-cb37-4e64-92c4-fb96aea3b213","Type":"ContainerDied","Data":"1cea13510c9034a5f4eb0f438857b548939c8c03d1bf9c95d0989122af9b8e22"} Dec 02 10:02:56 crc kubenswrapper[4685]: I1202 10:02:56.040056 4685 scope.go:117] "RemoveContainer" containerID="1cea13510c9034a5f4eb0f438857b548939c8c03d1bf9c95d0989122af9b8e22" Dec 02 10:02:59 crc kubenswrapper[4685]: I1202 10:02:59.434755 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:03:03 crc kubenswrapper[4685]: I1202 10:03:03.995035 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:03:03 crc kubenswrapper[4685]: I1202 10:03:03.998952 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:03:04 crc kubenswrapper[4685]: I1202 10:03:04.040646 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 02 10:03:04 crc kubenswrapper[4685]: E1202 10:03:04.842453 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:04 crc kubenswrapper[4685]: E1202 10:03:04.845204 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:04 crc kubenswrapper[4685]: E1202 10:03:04.846397 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:04 crc kubenswrapper[4685]: E1202 10:03:04.846427 4685 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" containerName="kube-multus-additional-cni-plugins" Dec 02 10:03:04 crc kubenswrapper[4685]: I1202 10:03:04.990060 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-n94sx" Dec 02 10:03:14 crc kubenswrapper[4685]: E1202 10:03:14.840791 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:14 crc kubenswrapper[4685]: E1202 10:03:14.841695 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:14 crc kubenswrapper[4685]: E1202 10:03:14.842887 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:14 crc kubenswrapper[4685]: E1202 10:03:14.843003 4685 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" containerName="kube-multus-additional-cni-plugins" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.157396 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-96htj_20083215-8e96-41e3-b76f-68dd87550bc6/kube-multus-additional-cni-plugins/0.log" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.157541 4685 generic.go:334] "Generic (PLEG): container finished" podID="20083215-8e96-41e3-b76f-68dd87550bc6" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" exitCode=137 Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.157638 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" event={"ID":"20083215-8e96-41e3-b76f-68dd87550bc6","Type":"ContainerDied","Data":"f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a"} Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.297520 4685 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 10:03:16 crc kubenswrapper[4685]: E1202 10:03:16.298070 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c2625f0-cd02-453e-bd74-8a7ba8937ea7" containerName="pruner" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.298099 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c2625f0-cd02-453e-bd74-8a7ba8937ea7" containerName="pruner" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.298419 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c2625f0-cd02-453e-bd74-8a7ba8937ea7" containerName="pruner" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.299310 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.317642 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.360973 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/90e980c2-336e-484f-a9dc-43116ceae46f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"90e980c2-336e-484f-a9dc-43116ceae46f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.361043 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90e980c2-336e-484f-a9dc-43116ceae46f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"90e980c2-336e-484f-a9dc-43116ceae46f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.462439 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/90e980c2-336e-484f-a9dc-43116ceae46f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"90e980c2-336e-484f-a9dc-43116ceae46f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.462510 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90e980c2-336e-484f-a9dc-43116ceae46f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"90e980c2-336e-484f-a9dc-43116ceae46f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.462624 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/90e980c2-336e-484f-a9dc-43116ceae46f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"90e980c2-336e-484f-a9dc-43116ceae46f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.485263 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90e980c2-336e-484f-a9dc-43116ceae46f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"90e980c2-336e-484f-a9dc-43116ceae46f\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 10:03:16 crc kubenswrapper[4685]: I1202 10:03:16.631102 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 10:03:17 crc kubenswrapper[4685]: I1202 10:03:17.921718 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.687623 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.688684 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.714823 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=3.714797143 podStartE2EDuration="3.714797143s" podCreationTimestamp="2025-12-02 10:03:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:03:20.711667418 +0000 UTC m=+93.083441572" watchObservedRunningTime="2025-12-02 10:03:20.714797143 +0000 UTC m=+93.086571287" Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.725748 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-var-lock\") pod \"installer-9-crc\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.725823 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kubelet-dir\") pod \"installer-9-crc\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.725848 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kube-api-access\") pod \"installer-9-crc\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.826530 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-var-lock\") pod \"installer-9-crc\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.826865 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kubelet-dir\") pod \"installer-9-crc\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.826992 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kube-api-access\") pod \"installer-9-crc\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.826910 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kubelet-dir\") pod \"installer-9-crc\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.826739 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-var-lock\") pod \"installer-9-crc\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:20 crc kubenswrapper[4685]: I1202 10:03:20.844066 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kube-api-access\") pod \"installer-9-crc\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:21 crc kubenswrapper[4685]: I1202 10:03:21.004256 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:03:23 crc kubenswrapper[4685]: I1202 10:03:23.521229 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 10:03:23 crc kubenswrapper[4685]: I1202 10:03:23.653158 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 10:03:23 crc kubenswrapper[4685]: I1202 10:03:23.769327 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kube-api-access\") pod \"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac\" (UID: \"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac\") " Dec 02 10:03:23 crc kubenswrapper[4685]: I1202 10:03:23.769399 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kubelet-dir\") pod \"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac\" (UID: \"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac\") " Dec 02 10:03:23 crc kubenswrapper[4685]: I1202 10:03:23.769676 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac" (UID: "e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:03:23 crc kubenswrapper[4685]: I1202 10:03:23.784172 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac" (UID: "e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:03:23 crc kubenswrapper[4685]: I1202 10:03:23.870783 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 10:03:23 crc kubenswrapper[4685]: I1202 10:03:23.870825 4685 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:03:24 crc kubenswrapper[4685]: I1202 10:03:24.207544 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac","Type":"ContainerDied","Data":"3a2db3b7d3e01bb730fa26ef70bf8f0272742e2a89e64a11b7476585ef499408"} Dec 02 10:03:24 crc kubenswrapper[4685]: I1202 10:03:24.207603 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 02 10:03:24 crc kubenswrapper[4685]: I1202 10:03:24.207608 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a2db3b7d3e01bb730fa26ef70bf8f0272742e2a89e64a11b7476585ef499408" Dec 02 10:03:24 crc kubenswrapper[4685]: E1202 10:03:24.841015 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:24 crc kubenswrapper[4685]: E1202 10:03:24.841776 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:24 crc kubenswrapper[4685]: E1202 10:03:24.842135 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:24 crc kubenswrapper[4685]: E1202 10:03:24.842201 4685 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" containerName="kube-multus-additional-cni-plugins" Dec 02 10:03:34 crc kubenswrapper[4685]: E1202 10:03:34.840496 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" 
containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:34 crc kubenswrapper[4685]: E1202 10:03:34.841499 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:34 crc kubenswrapper[4685]: E1202 10:03:34.842055 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 02 10:03:34 crc kubenswrapper[4685]: E1202 10:03:34.842133 4685 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" containerName="kube-multus-additional-cni-plugins" Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.910379 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-96htj_20083215-8e96-41e3-b76f-68dd87550bc6/kube-multus-additional-cni-plugins/0.log" Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.910961 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.923973 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvmv2\" (UniqueName: \"kubernetes.io/projected/20083215-8e96-41e3-b76f-68dd87550bc6-kube-api-access-mvmv2\") pod \"20083215-8e96-41e3-b76f-68dd87550bc6\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.924028 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/20083215-8e96-41e3-b76f-68dd87550bc6-tuning-conf-dir\") pod \"20083215-8e96-41e3-b76f-68dd87550bc6\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.924071 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/20083215-8e96-41e3-b76f-68dd87550bc6-cni-sysctl-allowlist\") pod \"20083215-8e96-41e3-b76f-68dd87550bc6\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.924129 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/20083215-8e96-41e3-b76f-68dd87550bc6-ready\") pod \"20083215-8e96-41e3-b76f-68dd87550bc6\" (UID: \"20083215-8e96-41e3-b76f-68dd87550bc6\") " Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.924443 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/20083215-8e96-41e3-b76f-68dd87550bc6-tuning-conf-dir" (OuterVolumeSpecName: "tuning-conf-dir") pod "20083215-8e96-41e3-b76f-68dd87550bc6" (UID: "20083215-8e96-41e3-b76f-68dd87550bc6"). InnerVolumeSpecName "tuning-conf-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.924642 4685 reconciler_common.go:293] "Volume detached for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/20083215-8e96-41e3-b76f-68dd87550bc6-tuning-conf-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.925281 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20083215-8e96-41e3-b76f-68dd87550bc6-ready" (OuterVolumeSpecName: "ready") pod "20083215-8e96-41e3-b76f-68dd87550bc6" (UID: "20083215-8e96-41e3-b76f-68dd87550bc6"). InnerVolumeSpecName "ready". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.925372 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20083215-8e96-41e3-b76f-68dd87550bc6-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "20083215-8e96-41e3-b76f-68dd87550bc6" (UID: "20083215-8e96-41e3-b76f-68dd87550bc6"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:03:37 crc kubenswrapper[4685]: I1202 10:03:37.969930 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20083215-8e96-41e3-b76f-68dd87550bc6-kube-api-access-mvmv2" (OuterVolumeSpecName: "kube-api-access-mvmv2") pod "20083215-8e96-41e3-b76f-68dd87550bc6" (UID: "20083215-8e96-41e3-b76f-68dd87550bc6"). InnerVolumeSpecName "kube-api-access-mvmv2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:03:38 crc kubenswrapper[4685]: I1202 10:03:38.025390 4685 reconciler_common.go:293] "Volume detached for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/20083215-8e96-41e3-b76f-68dd87550bc6-ready\") on node \"crc\" DevicePath \"\"" Dec 02 10:03:38 crc kubenswrapper[4685]: I1202 10:03:38.025428 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvmv2\" (UniqueName: \"kubernetes.io/projected/20083215-8e96-41e3-b76f-68dd87550bc6-kube-api-access-mvmv2\") on node \"crc\" DevicePath \"\"" Dec 02 10:03:38 crc kubenswrapper[4685]: I1202 10:03:38.025444 4685 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/20083215-8e96-41e3-b76f-68dd87550bc6-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 02 10:03:38 crc kubenswrapper[4685]: I1202 10:03:38.287299 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-96htj_20083215-8e96-41e3-b76f-68dd87550bc6/kube-multus-additional-cni-plugins/0.log" Dec 02 10:03:38 crc kubenswrapper[4685]: I1202 10:03:38.287341 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" event={"ID":"20083215-8e96-41e3-b76f-68dd87550bc6","Type":"ContainerDied","Data":"00310bd472bbce356c1b91a61b7230340e278ac59df4decdc38411510260bd5c"} Dec 02 10:03:38 crc kubenswrapper[4685]: I1202 10:03:38.287375 4685 scope.go:117] "RemoveContainer" containerID="f6b64e7abe9ea9f101ebf3bfe52aab41797fb79cfbcdb65ccd48730a6e4cc09a" Dec 02 10:03:38 crc kubenswrapper[4685]: I1202 10:03:38.287462 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-96htj" Dec 02 10:03:38 crc kubenswrapper[4685]: I1202 10:03:38.328739 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-96htj"] Dec 02 10:03:38 crc kubenswrapper[4685]: I1202 10:03:38.332897 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-96htj"] Dec 02 10:03:39 crc kubenswrapper[4685]: I1202 10:03:39.910894 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" path="/var/lib/kubelet/pods/20083215-8e96-41e3-b76f-68dd87550bc6/volumes" Dec 02 10:03:44 crc kubenswrapper[4685]: E1202 10:03:44.734676 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 02 10:03:44 crc kubenswrapper[4685]: E1202 10:03:44.735118 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z5l4l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-96v4q_openshift-marketplace(2c40690c-d875-4eef-9c0c-0a174aa43ca8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 10:03:44 crc kubenswrapper[4685]: E1202 10:03:44.736317 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-96v4q" podUID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" Dec 02 10:03:55 crc kubenswrapper[4685]: E1202 10:03:55.078851 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 02 10:03:55 crc kubenswrapper[4685]: E1202 10:03:55.079382 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2qtk7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-p94sb_openshift-marketplace(6418701b-a844-4b1c-8b5e-07e1b8a3faf6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 10:03:55 crc kubenswrapper[4685]: E1202 10:03:55.081404 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-p94sb" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" Dec 02 10:03:55 crc kubenswrapper[4685]: E1202 10:03:55.417491 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 02 10:03:55 crc kubenswrapper[4685]: E1202 10:03:55.417657 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4dvbl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-6kndg_openshift-marketplace(37ef1a3f-a5f2-4ec7-b996-2c8bacbac017): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 10:03:55 crc kubenswrapper[4685]: E1202 10:03:55.418795 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-6kndg" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" Dec 02 10:03:56 crc kubenswrapper[4685]: E1202 10:03:56.130900 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 02 10:03:56 crc kubenswrapper[4685]: E1202 10:03:56.131070 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p9znt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-8vsrk_openshift-marketplace(780a97cc-9fca-4b67-a956-8be8a6eb3d08): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 10:03:56 crc kubenswrapper[4685]: E1202 10:03:56.132240 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-8vsrk" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" Dec 02 10:04:00 crc kubenswrapper[4685]: E1202 10:04:00.427154 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-p94sb" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" Dec 02 10:04:00 crc kubenswrapper[4685]: E1202 10:04:00.427160 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-6kndg" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" Dec 02 10:04:00 crc kubenswrapper[4685]: E1202 10:04:00.427229 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-8vsrk" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" Dec 02 10:04:00 crc kubenswrapper[4685]: E1202 10:04:00.483030 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 02 10:04:00 crc kubenswrapper[4685]: E1202 10:04:00.483593 4685 kuberuntime_manager.go:1274] "Unhandled 
Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c4pwf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-lf7qs_openshift-marketplace(940db863-41cb-461b-ab9f-80663af52acf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 10:04:00 crc kubenswrapper[4685]: E1202 10:04:00.484885 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-lf7qs" podUID="940db863-41cb-461b-ab9f-80663af52acf" Dec 02 10:04:01 crc kubenswrapper[4685]: E1202 10:04:01.737095 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-lf7qs" podUID="940db863-41cb-461b-ab9f-80663af52acf" Dec 02 10:04:01 crc kubenswrapper[4685]: E1202 10:04:01.780339 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 02 10:04:01 crc kubenswrapper[4685]: E1202 10:04:01.780721 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bznjs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-rp5qc_openshift-marketplace(9ea11cb0-f32a-4346-b195-ce6c7176553b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 10:04:01 crc kubenswrapper[4685]: E1202 10:04:01.782134 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-rp5qc" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" Dec 02 10:04:01 crc kubenswrapper[4685]: E1202 10:04:01.793894 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 02 10:04:01 crc kubenswrapper[4685]: E1202 10:04:01.794064 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hsk4t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-zj8bb_openshift-marketplace(4eddbb2b-7286-4117-97fd-f915638d19cd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 10:04:01 crc kubenswrapper[4685]: E1202 10:04:01.795654 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-zj8bb" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" Dec 02 10:04:01 crc kubenswrapper[4685]: E1202 10:04:01.834451 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 02 10:04:01 crc kubenswrapper[4685]: E1202 10:04:01.834612 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q8gtg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-xd9bz_openshift-marketplace(05691e86-e66f-45a4-91cd-eb1045b4a56e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 02 10:04:01 crc kubenswrapper[4685]: E1202 10:04:01.835674 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-xd9bz" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" Dec 02 10:04:01 crc kubenswrapper[4685]: I1202 10:04:01.983622 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 02 10:04:02 crc kubenswrapper[4685]: I1202 10:04:02.027620 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 02 10:04:02 crc kubenswrapper[4685]: I1202 10:04:02.433235 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"90e980c2-336e-484f-a9dc-43116ceae46f","Type":"ContainerStarted","Data":"36097ee65f8869a703263b9bfd7a11d26dabbfdf22d6bd879a3ba5c070a362b4"} Dec 02 10:04:02 crc kubenswrapper[4685]: I1202 10:04:02.436822 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-665b6dd947-t84dj_12f6ce83-cb37-4e64-92c4-fb96aea3b213/cluster-samples-operator/0.log" Dec 02 10:04:02 crc kubenswrapper[4685]: I1202 10:04:02.436902 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-t84dj" event={"ID":"12f6ce83-cb37-4e64-92c4-fb96aea3b213","Type":"ContainerStarted","Data":"96bfc84a7086b64891c0d984294a6f4b68b0b6c8c8085cda46cf9667ffe0d76e"} Dec 02 10:04:02 crc kubenswrapper[4685]: I1202 10:04:02.438262 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" 
event={"ID":"659d9a5f-88ad-480f-88d4-24d9dda24b3e","Type":"ContainerStarted","Data":"6e73d2c799c314daa635f314568b2ce3d6920ace21053270694c49c935ee1497"} Dec 02 10:04:02 crc kubenswrapper[4685]: E1202 10:04:02.630853 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-zj8bb" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" Dec 02 10:04:02 crc kubenswrapper[4685]: E1202 10:04:02.630861 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-xd9bz" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" Dec 02 10:04:03 crc kubenswrapper[4685]: I1202 10:04:03.446494 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"659d9a5f-88ad-480f-88d4-24d9dda24b3e","Type":"ContainerStarted","Data":"30b6e65e922441240070d58f83557f841a97f4c1af3ff0ca16d280caed054168"} Dec 02 10:04:03 crc kubenswrapper[4685]: I1202 10:04:03.448816 4685 generic.go:334] "Generic (PLEG): container finished" podID="90e980c2-336e-484f-a9dc-43116ceae46f" containerID="0165dfe9e26a4c7c66a69d460ac741876c84b396c7189d929dc1722921eee6c3" exitCode=0 Dec 02 10:04:03 crc kubenswrapper[4685]: I1202 10:04:03.448950 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"90e980c2-336e-484f-a9dc-43116ceae46f","Type":"ContainerDied","Data":"0165dfe9e26a4c7c66a69d460ac741876c84b396c7189d929dc1722921eee6c3"} Dec 02 10:04:03 crc kubenswrapper[4685]: I1202 10:04:03.451811 4685 generic.go:334] "Generic (PLEG): container finished" podID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerID="665dbadd27133dec8b742dc50b62319008ea8d3b7058d43c788f7d1c6e5b6d35" exitCode=0 Dec 02 10:04:03 crc kubenswrapper[4685]: I1202 10:04:03.451905 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96v4q" event={"ID":"2c40690c-d875-4eef-9c0c-0a174aa43ca8","Type":"ContainerDied","Data":"665dbadd27133dec8b742dc50b62319008ea8d3b7058d43c788f7d1c6e5b6d35"} Dec 02 10:04:03 crc kubenswrapper[4685]: I1202 10:04:03.484286 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=43.484268603 podStartE2EDuration="43.484268603s" podCreationTimestamp="2025-12-02 10:03:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:04:03.469608936 +0000 UTC m=+135.841383080" watchObservedRunningTime="2025-12-02 10:04:03.484268603 +0000 UTC m=+135.856042757" Dec 02 10:04:04 crc kubenswrapper[4685]: I1202 10:04:04.701288 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 10:04:04 crc kubenswrapper[4685]: I1202 10:04:04.764172 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90e980c2-336e-484f-a9dc-43116ceae46f-kube-api-access\") pod \"90e980c2-336e-484f-a9dc-43116ceae46f\" (UID: \"90e980c2-336e-484f-a9dc-43116ceae46f\") " Dec 02 10:04:04 crc kubenswrapper[4685]: I1202 10:04:04.764274 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/90e980c2-336e-484f-a9dc-43116ceae46f-kubelet-dir\") pod \"90e980c2-336e-484f-a9dc-43116ceae46f\" (UID: \"90e980c2-336e-484f-a9dc-43116ceae46f\") " Dec 02 10:04:04 crc kubenswrapper[4685]: I1202 10:04:04.764379 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/90e980c2-336e-484f-a9dc-43116ceae46f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "90e980c2-336e-484f-a9dc-43116ceae46f" (UID: "90e980c2-336e-484f-a9dc-43116ceae46f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:04:04 crc kubenswrapper[4685]: I1202 10:04:04.764477 4685 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/90e980c2-336e-484f-a9dc-43116ceae46f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:04 crc kubenswrapper[4685]: I1202 10:04:04.769197 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90e980c2-336e-484f-a9dc-43116ceae46f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "90e980c2-336e-484f-a9dc-43116ceae46f" (UID: "90e980c2-336e-484f-a9dc-43116ceae46f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:04:04 crc kubenswrapper[4685]: I1202 10:04:04.865109 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90e980c2-336e-484f-a9dc-43116ceae46f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:05 crc kubenswrapper[4685]: I1202 10:04:05.323337 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vbfhw"] Dec 02 10:04:05 crc kubenswrapper[4685]: I1202 10:04:05.462929 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96v4q" event={"ID":"2c40690c-d875-4eef-9c0c-0a174aa43ca8","Type":"ContainerStarted","Data":"7fafb5fc66f6d4c7e4cb01d456fc8cca0c89d5873f063ab8f9768c5fb0a06afb"} Dec 02 10:04:05 crc kubenswrapper[4685]: I1202 10:04:05.464421 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"90e980c2-336e-484f-a9dc-43116ceae46f","Type":"ContainerDied","Data":"36097ee65f8869a703263b9bfd7a11d26dabbfdf22d6bd879a3ba5c070a362b4"} Dec 02 10:04:05 crc kubenswrapper[4685]: I1202 10:04:05.464450 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36097ee65f8869a703263b9bfd7a11d26dabbfdf22d6bd879a3ba5c070a362b4" Dec 02 10:04:05 crc kubenswrapper[4685]: I1202 10:04:05.464453 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 02 10:04:06 crc kubenswrapper[4685]: I1202 10:04:06.347196 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:04:06 crc kubenswrapper[4685]: I1202 10:04:06.347595 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:04:07 crc kubenswrapper[4685]: I1202 10:04:07.205590 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:04:07 crc kubenswrapper[4685]: I1202 10:04:07.224771 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-96v4q" podStartSLOduration=6.403619569 podStartE2EDuration="1m32.224750796s" podCreationTimestamp="2025-12-02 10:02:35 +0000 UTC" firstStartedPulling="2025-12-02 10:02:38.820152496 +0000 UTC m=+51.191926650" lastFinishedPulling="2025-12-02 10:04:04.641283723 +0000 UTC m=+137.013057877" observedRunningTime="2025-12-02 10:04:05.501238374 +0000 UTC m=+137.873012518" watchObservedRunningTime="2025-12-02 10:04:07.224750796 +0000 UTC m=+139.596524950" Dec 02 10:04:12 crc kubenswrapper[4685]: I1202 10:04:12.147474 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:04:12 crc kubenswrapper[4685]: I1202 10:04:12.148039 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:04:16 crc kubenswrapper[4685]: I1202 10:04:16.387689 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:04:25 crc kubenswrapper[4685]: I1202 10:04:25.567983 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rp5qc" event={"ID":"9ea11cb0-f32a-4346-b195-ce6c7176553b","Type":"ContainerStarted","Data":"82660154c89fd907e9740908f8013dcd67410dcfd709719e58e41b446022ccf8"} Dec 02 10:04:25 crc kubenswrapper[4685]: I1202 10:04:25.576360 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj8bb" event={"ID":"4eddbb2b-7286-4117-97fd-f915638d19cd","Type":"ContainerStarted","Data":"2cb1a63b6745bce9d58a44b23f96333fc1421a71b13a951f85748b050d32c409"} Dec 02 10:04:25 crc kubenswrapper[4685]: I1202 10:04:25.578239 4685 generic.go:334] "Generic (PLEG): container finished" podID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerID="63a7e0451c8ea64bf36cc3ce34c10434b697b1428cd739296447e7cccd625347" exitCode=0 Dec 02 10:04:25 crc kubenswrapper[4685]: I1202 10:04:25.578305 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xd9bz" event={"ID":"05691e86-e66f-45a4-91cd-eb1045b4a56e","Type":"ContainerDied","Data":"63a7e0451c8ea64bf36cc3ce34c10434b697b1428cd739296447e7cccd625347"} Dec 02 10:04:25 crc kubenswrapper[4685]: I1202 10:04:25.584681 4685 generic.go:334] "Generic (PLEG): container 
finished" podID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerID="202b984a281911f12300d840d7cb32b23e2c2a6394f6027c3af032b0fa054626" exitCode=0 Dec 02 10:04:25 crc kubenswrapper[4685]: I1202 10:04:25.584760 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8vsrk" event={"ID":"780a97cc-9fca-4b67-a956-8be8a6eb3d08","Type":"ContainerDied","Data":"202b984a281911f12300d840d7cb32b23e2c2a6394f6027c3af032b0fa054626"} Dec 02 10:04:25 crc kubenswrapper[4685]: I1202 10:04:25.587521 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf7qs" event={"ID":"940db863-41cb-461b-ab9f-80663af52acf","Type":"ContainerStarted","Data":"7cd1bc73b08f81ea3ed3dea711bba64069b9aa4d06ca3d18fe71f3add56660e2"} Dec 02 10:04:25 crc kubenswrapper[4685]: I1202 10:04:25.607074 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kndg" event={"ID":"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017","Type":"ContainerStarted","Data":"2588a03fb55fd3b939de3549a9b001845993e9f245c11c9028d2c9948fdd5203"} Dec 02 10:04:25 crc kubenswrapper[4685]: I1202 10:04:25.609829 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p94sb" event={"ID":"6418701b-a844-4b1c-8b5e-07e1b8a3faf6","Type":"ContainerStarted","Data":"35ea497f64891da3db053415c5fcddfed34c6cb1c8328f8c08941c928190b6f2"} Dec 02 10:04:26 crc kubenswrapper[4685]: I1202 10:04:26.630547 4685 generic.go:334] "Generic (PLEG): container finished" podID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerID="2cb1a63b6745bce9d58a44b23f96333fc1421a71b13a951f85748b050d32c409" exitCode=0 Dec 02 10:04:26 crc kubenswrapper[4685]: I1202 10:04:26.630656 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj8bb" event={"ID":"4eddbb2b-7286-4117-97fd-f915638d19cd","Type":"ContainerDied","Data":"2cb1a63b6745bce9d58a44b23f96333fc1421a71b13a951f85748b050d32c409"} Dec 02 10:04:27 crc kubenswrapper[4685]: I1202 10:04:27.637019 4685 generic.go:334] "Generic (PLEG): container finished" podID="940db863-41cb-461b-ab9f-80663af52acf" containerID="7cd1bc73b08f81ea3ed3dea711bba64069b9aa4d06ca3d18fe71f3add56660e2" exitCode=0 Dec 02 10:04:27 crc kubenswrapper[4685]: I1202 10:04:27.637064 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf7qs" event={"ID":"940db863-41cb-461b-ab9f-80663af52acf","Type":"ContainerDied","Data":"7cd1bc73b08f81ea3ed3dea711bba64069b9aa4d06ca3d18fe71f3add56660e2"} Dec 02 10:04:27 crc kubenswrapper[4685]: I1202 10:04:27.639544 4685 generic.go:334] "Generic (PLEG): container finished" podID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerID="2588a03fb55fd3b939de3549a9b001845993e9f245c11c9028d2c9948fdd5203" exitCode=0 Dec 02 10:04:27 crc kubenswrapper[4685]: I1202 10:04:27.639625 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kndg" event={"ID":"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017","Type":"ContainerDied","Data":"2588a03fb55fd3b939de3549a9b001845993e9f245c11c9028d2c9948fdd5203"} Dec 02 10:04:27 crc kubenswrapper[4685]: I1202 10:04:27.650046 4685 generic.go:334] "Generic (PLEG): container finished" podID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerID="35ea497f64891da3db053415c5fcddfed34c6cb1c8328f8c08941c928190b6f2" exitCode=0 Dec 02 10:04:27 crc kubenswrapper[4685]: I1202 10:04:27.650095 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-p94sb" event={"ID":"6418701b-a844-4b1c-8b5e-07e1b8a3faf6","Type":"ContainerDied","Data":"35ea497f64891da3db053415c5fcddfed34c6cb1c8328f8c08941c928190b6f2"} Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.677065 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf7qs" event={"ID":"940db863-41cb-461b-ab9f-80663af52acf","Type":"ContainerStarted","Data":"cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f"} Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.680714 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kndg" event={"ID":"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017","Type":"ContainerStarted","Data":"eebeb371f7ae7a168d366e355ea77238c52e44cadd08024b3265ff448f5c3da4"} Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.683690 4685 generic.go:334] "Generic (PLEG): container finished" podID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerID="82660154c89fd907e9740908f8013dcd67410dcfd709719e58e41b446022ccf8" exitCode=0 Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.683749 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rp5qc" event={"ID":"9ea11cb0-f32a-4346-b195-ce6c7176553b","Type":"ContainerDied","Data":"82660154c89fd907e9740908f8013dcd67410dcfd709719e58e41b446022ccf8"} Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.692032 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj8bb" event={"ID":"4eddbb2b-7286-4117-97fd-f915638d19cd","Type":"ContainerStarted","Data":"76a821d0d0452aa117685020a16ac06a6e01ecd929d247493512ae3d5af2dfda"} Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.697552 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xd9bz" event={"ID":"05691e86-e66f-45a4-91cd-eb1045b4a56e","Type":"ContainerStarted","Data":"1b3d05ab7cdf160764d09c38abbfdc98c855564eed358a6cbea9a0497c45edda"} Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.701783 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8vsrk" event={"ID":"780a97cc-9fca-4b67-a956-8be8a6eb3d08","Type":"ContainerStarted","Data":"ad564476d6e8c96dd4406eeee6475114866ad9a8a1cba1c9ff8cc170581be9e1"} Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.702693 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lf7qs" podStartSLOduration=6.369340454 podStartE2EDuration="1m51.702674612s" podCreationTimestamp="2025-12-02 10:02:38 +0000 UTC" firstStartedPulling="2025-12-02 10:02:43.928749494 +0000 UTC m=+56.300523648" lastFinishedPulling="2025-12-02 10:04:29.262083652 +0000 UTC m=+161.633857806" observedRunningTime="2025-12-02 10:04:29.69970467 +0000 UTC m=+162.071478824" watchObservedRunningTime="2025-12-02 10:04:29.702674612 +0000 UTC m=+162.074448766" Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.746454 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6kndg" podStartSLOduration=3.022490819 podStartE2EDuration="1m54.746433725s" podCreationTimestamp="2025-12-02 10:02:35 +0000 UTC" firstStartedPulling="2025-12-02 10:02:37.670374493 +0000 UTC m=+50.042148647" lastFinishedPulling="2025-12-02 10:04:29.394317399 +0000 UTC m=+161.766091553" observedRunningTime="2025-12-02 10:04:29.743818462 +0000 UTC 
m=+162.115592606" watchObservedRunningTime="2025-12-02 10:04:29.746433725 +0000 UTC m=+162.118207869" Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.766989 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xd9bz" podStartSLOduration=3.208334906 podStartE2EDuration="1m52.766975055s" podCreationTimestamp="2025-12-02 10:02:37 +0000 UTC" firstStartedPulling="2025-12-02 10:02:39.882249552 +0000 UTC m=+52.254023706" lastFinishedPulling="2025-12-02 10:04:29.440889701 +0000 UTC m=+161.812663855" observedRunningTime="2025-12-02 10:04:29.764468056 +0000 UTC m=+162.136242210" watchObservedRunningTime="2025-12-02 10:04:29.766975055 +0000 UTC m=+162.138749209" Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.790502 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zj8bb" podStartSLOduration=3.132330832 podStartE2EDuration="1m52.790487947s" podCreationTimestamp="2025-12-02 10:02:37 +0000 UTC" firstStartedPulling="2025-12-02 10:02:39.837821754 +0000 UTC m=+52.209595908" lastFinishedPulling="2025-12-02 10:04:29.495978869 +0000 UTC m=+161.867753023" observedRunningTime="2025-12-02 10:04:29.787879775 +0000 UTC m=+162.159653939" watchObservedRunningTime="2025-12-02 10:04:29.790487947 +0000 UTC m=+162.162262101" Dec 02 10:04:29 crc kubenswrapper[4685]: I1202 10:04:29.815063 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8vsrk" podStartSLOduration=4.463998567 podStartE2EDuration="1m54.815040368s" podCreationTimestamp="2025-12-02 10:02:35 +0000 UTC" firstStartedPulling="2025-12-02 10:02:38.830620911 +0000 UTC m=+51.202395065" lastFinishedPulling="2025-12-02 10:04:29.181662712 +0000 UTC m=+161.553436866" observedRunningTime="2025-12-02 10:04:29.81042158 +0000 UTC m=+162.182195744" watchObservedRunningTime="2025-12-02 10:04:29.815040368 +0000 UTC m=+162.186814522" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.352769 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" podUID="332dd167-8e86-4071-a277-547ab88bc15d" containerName="oauth-openshift" containerID="cri-o://78646ab340a7c4ea26e3ea8d5de069efb05a75cbc1f73301105744cb18b00d5c" gracePeriod=15 Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.718081 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p94sb" event={"ID":"6418701b-a844-4b1c-8b5e-07e1b8a3faf6","Type":"ContainerStarted","Data":"95cea5c277f19a1400c7c17b44fb777b16e1770f30a2a6210ca75794786a0bf2"} Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.721346 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rp5qc" event={"ID":"9ea11cb0-f32a-4346-b195-ce6c7176553b","Type":"ContainerStarted","Data":"a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4"} Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.723034 4685 generic.go:334] "Generic (PLEG): container finished" podID="332dd167-8e86-4071-a277-547ab88bc15d" containerID="78646ab340a7c4ea26e3ea8d5de069efb05a75cbc1f73301105744cb18b00d5c" exitCode=0 Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.723064 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" 
event={"ID":"332dd167-8e86-4071-a277-547ab88bc15d","Type":"ContainerDied","Data":"78646ab340a7c4ea26e3ea8d5de069efb05a75cbc1f73301105744cb18b00d5c"} Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.741389 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-p94sb" podStartSLOduration=3.836478393 podStartE2EDuration="1m54.741350849s" podCreationTimestamp="2025-12-02 10:02:36 +0000 UTC" firstStartedPulling="2025-12-02 10:02:38.761652383 +0000 UTC m=+51.133426547" lastFinishedPulling="2025-12-02 10:04:29.666524849 +0000 UTC m=+162.038299003" observedRunningTime="2025-12-02 10:04:30.741102432 +0000 UTC m=+163.112876596" watchObservedRunningTime="2025-12-02 10:04:30.741350849 +0000 UTC m=+163.113125003" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.762183 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rp5qc" podStartSLOduration=45.149272928 podStartE2EDuration="1m51.762161497s" podCreationTimestamp="2025-12-02 10:02:39 +0000 UTC" firstStartedPulling="2025-12-02 10:03:23.590262443 +0000 UTC m=+95.962036607" lastFinishedPulling="2025-12-02 10:04:30.203151022 +0000 UTC m=+162.574925176" observedRunningTime="2025-12-02 10:04:30.759702098 +0000 UTC m=+163.131476262" watchObservedRunningTime="2025-12-02 10:04:30.762161497 +0000 UTC m=+163.133935651" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.814583 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.870402 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-86bc4856f8-n2nfp"] Dec 02 10:04:30 crc kubenswrapper[4685]: E1202 10:04:30.870679 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac" containerName="pruner" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.870699 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac" containerName="pruner" Dec 02 10:04:30 crc kubenswrapper[4685]: E1202 10:04:30.870724 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" containerName="kube-multus-additional-cni-plugins" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.870733 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" containerName="kube-multus-additional-cni-plugins" Dec 02 10:04:30 crc kubenswrapper[4685]: E1202 10:04:30.870745 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90e980c2-336e-484f-a9dc-43116ceae46f" containerName="pruner" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.870752 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="90e980c2-336e-484f-a9dc-43116ceae46f" containerName="pruner" Dec 02 10:04:30 crc kubenswrapper[4685]: E1202 10:04:30.870762 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="332dd167-8e86-4071-a277-547ab88bc15d" containerName="oauth-openshift" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.870769 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="332dd167-8e86-4071-a277-547ab88bc15d" containerName="oauth-openshift" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.870897 4685 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="90e980c2-336e-484f-a9dc-43116ceae46f" containerName="pruner" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.870909 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="332dd167-8e86-4071-a277-547ab88bc15d" containerName="oauth-openshift" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.870921 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="20083215-8e96-41e3-b76f-68dd87550bc6" containerName="kube-multus-additional-cni-plugins" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.870930 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e79fb6d2-9c3e-4a7a-8f40-b8d72609f8ac" containerName="pruner" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.871440 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.883789 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-86bc4856f8-n2nfp"] Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.956694 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-provider-selection\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.956763 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-audit-policies\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.956791 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-service-ca\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.956990 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-login\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957431 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-error\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957494 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-serving-cert\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957527 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68td8\" 
(UniqueName: \"kubernetes.io/projected/332dd167-8e86-4071-a277-547ab88bc15d-kube-api-access-68td8\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957553 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-idp-0-file-data\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957601 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-cliconfig\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957633 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-trusted-ca-bundle\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957664 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-ocp-branding-template\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957696 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-session\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957730 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-router-certs\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957709 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957756 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/332dd167-8e86-4071-a277-547ab88bc15d-audit-dir\") pod \"332dd167-8e86-4071-a277-547ab88bc15d\" (UID: \"332dd167-8e86-4071-a277-547ab88bc15d\") " Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.957780 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/332dd167-8e86-4071-a277-547ab88bc15d-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958071 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-template-login\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958115 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgtqj\" (UniqueName: \"kubernetes.io/projected/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-kube-api-access-fgtqj\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958175 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-serving-cert\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958216 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958240 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-template-error\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958262 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-router-certs\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " 
pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958318 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-audit-dir\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958335 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958387 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-audit-policies\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958413 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958458 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958479 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-service-ca\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958501 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-session\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958693 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-cliconfig\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958747 4685 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/332dd167-8e86-4071-a277-547ab88bc15d-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958759 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.958172 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.960192 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.962061 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.970568 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/332dd167-8e86-4071-a277-547ab88bc15d-kube-api-access-68td8" (OuterVolumeSpecName: "kube-api-access-68td8") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "kube-api-access-68td8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.974650 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.974839 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.977938 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.981114 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.981424 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.981608 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.982721 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:04:30 crc kubenswrapper[4685]: I1202 10:04:30.984997 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "332dd167-8e86-4071-a277-547ab88bc15d" (UID: "332dd167-8e86-4071-a277-547ab88bc15d"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059684 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059744 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-audit-policies\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059769 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059791 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059814 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-service-ca\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059841 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-session\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059869 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-cliconfig\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059896 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-template-login\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " 
pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059917 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgtqj\" (UniqueName: \"kubernetes.io/projected/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-kube-api-access-fgtqj\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059939 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-serving-cert\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059959 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059977 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-template-error\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.059994 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-router-certs\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060016 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-audit-dir\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060053 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68td8\" (UniqueName: \"kubernetes.io/projected/332dd167-8e86-4071-a277-547ab88bc15d-kube-api-access-68td8\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060063 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060072 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath 
\"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060082 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060092 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060102 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060112 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060123 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060132 4685 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/332dd167-8e86-4071-a277-547ab88bc15d-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060141 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060151 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060160 4685 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/332dd167-8e86-4071-a277-547ab88bc15d-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060201 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-audit-dir\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.060889 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-audit-policies\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: 
I1202 10:04:31.061985 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-service-ca\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.063159 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-cliconfig\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.063179 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.063465 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-template-login\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.064309 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.075278 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-template-error\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.075379 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.075841 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-serving-cert\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.075848 4685 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.076104 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-router-certs\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.078204 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-v4-0-config-system-session\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.079935 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgtqj\" (UniqueName: \"kubernetes.io/projected/f793ad59-ffd3-416e-b1e3-9646e4c6bfa4-kube-api-access-fgtqj\") pod \"oauth-openshift-86bc4856f8-n2nfp\" (UID: \"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4\") " pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.208847 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.571405 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-86bc4856f8-n2nfp"] Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.730953 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" event={"ID":"332dd167-8e86-4071-a277-547ab88bc15d","Type":"ContainerDied","Data":"9370a078279a958aa70996fedd4075365e9153f6b963534a3bfb1b2e7d79f411"} Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.731013 4685 scope.go:117] "RemoveContainer" containerID="78646ab340a7c4ea26e3ea8d5de069efb05a75cbc1f73301105744cb18b00d5c" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.731013 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-vbfhw" Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.733816 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" event={"ID":"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4","Type":"ContainerStarted","Data":"2610d4d23d83817e91b7473308b2d71a43dd70dcf194c5f5be41b3179b545046"} Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.781334 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vbfhw"] Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.783282 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vbfhw"] Dec 02 10:04:31 crc kubenswrapper[4685]: I1202 10:04:31.906930 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="332dd167-8e86-4071-a277-547ab88bc15d" path="/var/lib/kubelet/pods/332dd167-8e86-4071-a277-547ab88bc15d/volumes" Dec 02 10:04:33 crc kubenswrapper[4685]: I1202 10:04:33.753136 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" event={"ID":"f793ad59-ffd3-416e-b1e3-9646e4c6bfa4","Type":"ContainerStarted","Data":"eabcc051a75a1c3c34363548631423c03d7cede46fc0c8d2acf2896089d54c03"} Dec 02 10:04:33 crc kubenswrapper[4685]: I1202 10:04:33.753722 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:33 crc kubenswrapper[4685]: I1202 10:04:33.772867 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" Dec 02 10:04:33 crc kubenswrapper[4685]: I1202 10:04:33.789240 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-86bc4856f8-n2nfp" podStartSLOduration=28.789222072 podStartE2EDuration="28.789222072s" podCreationTimestamp="2025-12-02 10:04:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:04:33.788651706 +0000 UTC m=+166.160425870" watchObservedRunningTime="2025-12-02 10:04:33.789222072 +0000 UTC m=+166.160996226" Dec 02 10:04:35 crc kubenswrapper[4685]: I1202 10:04:35.516249 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:04:35 crc kubenswrapper[4685]: I1202 10:04:35.516622 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:04:35 crc kubenswrapper[4685]: I1202 10:04:35.572014 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:04:35 crc kubenswrapper[4685]: I1202 10:04:35.806436 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:04:36 crc kubenswrapper[4685]: I1202 10:04:36.004902 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:04:36 crc kubenswrapper[4685]: I1202 10:04:36.004976 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:04:36 crc kubenswrapper[4685]: I1202 
10:04:36.045597 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:04:36 crc kubenswrapper[4685]: I1202 10:04:36.809777 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:04:36 crc kubenswrapper[4685]: I1202 10:04:36.831720 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:04:36 crc kubenswrapper[4685]: I1202 10:04:36.831762 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:04:36 crc kubenswrapper[4685]: I1202 10:04:36.869904 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:04:37 crc kubenswrapper[4685]: I1202 10:04:37.820848 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:04:37 crc kubenswrapper[4685]: I1202 10:04:37.980751 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:04:37 crc kubenswrapper[4685]: I1202 10:04:37.981185 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:04:38 crc kubenswrapper[4685]: I1202 10:04:38.019163 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:04:38 crc kubenswrapper[4685]: I1202 10:04:38.352343 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:04:38 crc kubenswrapper[4685]: I1202 10:04:38.352433 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:04:38 crc kubenswrapper[4685]: I1202 10:04:38.419902 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:04:38 crc kubenswrapper[4685]: I1202 10:04:38.810824 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p94sb"] Dec 02 10:04:38 crc kubenswrapper[4685]: I1202 10:04:38.827831 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:04:38 crc kubenswrapper[4685]: I1202 10:04:38.828083 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:04:39 crc kubenswrapper[4685]: I1202 10:04:39.005140 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8vsrk"] Dec 02 10:04:39 crc kubenswrapper[4685]: I1202 10:04:39.005429 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8vsrk" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerName="registry-server" containerID="cri-o://ad564476d6e8c96dd4406eeee6475114866ad9a8a1cba1c9ff8cc170581be9e1" gracePeriod=2 Dec 02 10:04:39 crc kubenswrapper[4685]: I1202 10:04:39.181878 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:04:39 crc 
kubenswrapper[4685]: I1202 10:04:39.181952 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:04:39 crc kubenswrapper[4685]: I1202 10:04:39.227710 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:04:39 crc kubenswrapper[4685]: I1202 10:04:39.543826 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:04:39 crc kubenswrapper[4685]: I1202 10:04:39.543901 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:04:39 crc kubenswrapper[4685]: I1202 10:04:39.582253 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:04:39 crc kubenswrapper[4685]: I1202 10:04:39.788100 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-p94sb" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerName="registry-server" containerID="cri-o://95cea5c277f19a1400c7c17b44fb777b16e1770f30a2a6210ca75794786a0bf2" gracePeriod=2 Dec 02 10:04:39 crc kubenswrapper[4685]: I1202 10:04:39.836329 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:04:39 crc kubenswrapper[4685]: I1202 10:04:39.836968 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.118431 4685 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.119356 4685 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.119482 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.119787 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1" gracePeriod=15 Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.119996 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://06371aca8f40edf6b9cfe4be2ac99fae9780a6e0a893952e0187b5b236ee1243" gracePeriod=15 Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120138 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536" gracePeriod=15 Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120178 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b" gracePeriod=15 Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120264 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd" gracePeriod=15 Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120551 4685 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 10:04:40 crc kubenswrapper[4685]: E1202 10:04:40.120734 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120756 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 10:04:40 crc kubenswrapper[4685]: E1202 10:04:40.120764 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120772 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 10:04:40 crc kubenswrapper[4685]: E1202 10:04:40.120781 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120788 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 10:04:40 crc kubenswrapper[4685]: E1202 10:04:40.120794 4685 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120800 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 10:04:40 crc kubenswrapper[4685]: E1202 10:04:40.120808 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120813 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 02 10:04:40 crc kubenswrapper[4685]: E1202 10:04:40.120826 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120832 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 10:04:40 crc kubenswrapper[4685]: E1202 10:04:40.120839 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120845 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120947 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120959 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120969 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120978 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.120987 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.121231 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.146422 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.146463 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:40 crc 
kubenswrapper[4685]: I1202 10:04:40.146514 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.146611 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.146633 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.146669 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.146687 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.146707 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.247918 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248155 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248256 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248387 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248519 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248633 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248768 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248100 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248774 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248812 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248831 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248852 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 
10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248909 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.248938 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.249037 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:40 crc kubenswrapper[4685]: I1202 10:04:40.249224 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:42 crc kubenswrapper[4685]: I1202 10:04:42.147516 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:04:42 crc kubenswrapper[4685]: I1202 10:04:42.147600 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:04:42 crc kubenswrapper[4685]: I1202 10:04:42.588865 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 10:04:42 crc kubenswrapper[4685]: I1202 10:04:42.598654 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:04:42 crc kubenswrapper[4685]: I1202 10:04:42.634774 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xd9bz"] Dec 02 10:04:42 crc kubenswrapper[4685]: I1202 10:04:42.635373 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xd9bz" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerName="registry-server" containerID="cri-o://1b3d05ab7cdf160764d09c38abbfdc98c855564eed358a6cbea9a0497c45edda" gracePeriod=2 Dec 02 10:04:42 crc kubenswrapper[4685]: I1202 10:04:42.809303 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"a9216d90412fbcace6064ad03bf3e4c2ebc8ced28c011789a459bcf5c45d2d39"} Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.815756 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.819866 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.820931 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="06371aca8f40edf6b9cfe4be2ac99fae9780a6e0a893952e0187b5b236ee1243" exitCode=0 Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.820955 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536" exitCode=0 Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.820965 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd" exitCode=0 Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.820972 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b" exitCode=2 Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.821062 4685 scope.go:117] "RemoveContainer" containerID="baa2d4e1ede37562b28cd3619cf4838ff75e5b3fb71c444284c2a16d5b657212" Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.826706 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p94sb_6418701b-a844-4b1c-8b5e-07e1b8a3faf6/registry-server/0.log" Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.827404 4685 generic.go:334] "Generic (PLEG): container finished" podID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerID="95cea5c277f19a1400c7c17b44fb777b16e1770f30a2a6210ca75794786a0bf2" exitCode=137 Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.827453 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p94sb" event={"ID":"6418701b-a844-4b1c-8b5e-07e1b8a3faf6","Type":"ContainerDied","Data":"95cea5c277f19a1400c7c17b44fb777b16e1770f30a2a6210ca75794786a0bf2"} Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.829177 4685 generic.go:334] "Generic (PLEG): 
container finished" podID="659d9a5f-88ad-480f-88d4-24d9dda24b3e" containerID="30b6e65e922441240070d58f83557f841a97f4c1af3ff0ca16d280caed054168" exitCode=0 Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.829214 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"659d9a5f-88ad-480f-88d4-24d9dda24b3e","Type":"ContainerDied","Data":"30b6e65e922441240070d58f83557f841a97f4c1af3ff0ca16d280caed054168"} Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.831114 4685 generic.go:334] "Generic (PLEG): container finished" podID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerID="1b3d05ab7cdf160764d09c38abbfdc98c855564eed358a6cbea9a0497c45edda" exitCode=0 Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.831193 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xd9bz" event={"ID":"05691e86-e66f-45a4-91cd-eb1045b4a56e","Type":"ContainerDied","Data":"1b3d05ab7cdf160764d09c38abbfdc98c855564eed358a6cbea9a0497c45edda"} Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.832654 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8vsrk_780a97cc-9fca-4b67-a956-8be8a6eb3d08/registry-server/0.log" Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.833261 4685 generic.go:334] "Generic (PLEG): container finished" podID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerID="ad564476d6e8c96dd4406eeee6475114866ad9a8a1cba1c9ff8cc170581be9e1" exitCode=137 Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.833326 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8vsrk" event={"ID":"780a97cc-9fca-4b67-a956-8be8a6eb3d08","Type":"ContainerDied","Data":"ad564476d6e8c96dd4406eeee6475114866ad9a8a1cba1c9ff8cc170581be9e1"} Dec 02 10:04:43 crc kubenswrapper[4685]: I1202 10:04:43.834527 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"8b7fd2a0913512c609c8065c457e1f893021919e27d8b2306f4b1ac37cb4a22e"} Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.728636 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p94sb_6418701b-a844-4b1c-8b5e-07e1b8a3faf6/registry-server/0.log" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.736739 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.816815 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.820520 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-utilities\") pod \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.820622 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qtk7\" (UniqueName: \"kubernetes.io/projected/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-kube-api-access-2qtk7\") pod \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.820670 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-catalog-content\") pod \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\" (UID: \"6418701b-a844-4b1c-8b5e-07e1b8a3faf6\") " Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.821645 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-utilities" (OuterVolumeSpecName: "utilities") pod "6418701b-a844-4b1c-8b5e-07e1b8a3faf6" (UID: "6418701b-a844-4b1c-8b5e-07e1b8a3faf6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.835903 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-kube-api-access-2qtk7" (OuterVolumeSpecName: "kube-api-access-2qtk7") pod "6418701b-a844-4b1c-8b5e-07e1b8a3faf6" (UID: "6418701b-a844-4b1c-8b5e-07e1b8a3faf6"). InnerVolumeSpecName "kube-api-access-2qtk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.857976 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-p94sb_6418701b-a844-4b1c-8b5e-07e1b8a3faf6/registry-server/0.log" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.862793 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p94sb" event={"ID":"6418701b-a844-4b1c-8b5e-07e1b8a3faf6","Type":"ContainerDied","Data":"afe1477af027b1e1b06ce152abacd2f9e2eee8a8ffc630bce407221c912fc8ca"} Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.863053 4685 scope.go:117] "RemoveContainer" containerID="95cea5c277f19a1400c7c17b44fb777b16e1770f30a2a6210ca75794786a0bf2" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.863241 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p94sb" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.882047 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xd9bz" event={"ID":"05691e86-e66f-45a4-91cd-eb1045b4a56e","Type":"ContainerDied","Data":"689558aac1a999b0e2fb0d997be7de2f440c76a0dd88d70594053e06a6fc0cb6"} Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.882164 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xd9bz" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.889398 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.889873 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8vsrk_780a97cc-9fca-4b67-a956-8be8a6eb3d08/registry-server/0.log" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.892030 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.892491 4685 scope.go:117] "RemoveContainer" containerID="35ea497f64891da3db053415c5fcddfed34c6cb1c8328f8c08941c928190b6f2" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.896181 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6418701b-a844-4b1c-8b5e-07e1b8a3faf6" (UID: "6418701b-a844-4b1c-8b5e-07e1b8a3faf6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.908787 4685 scope.go:117] "RemoveContainer" containerID="823a1d8e2e1ccbfe50f1304c63c926cb87b26fa021f0ef627bdd15715f1a25be" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.922066 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-catalog-content\") pod \"05691e86-e66f-45a4-91cd-eb1045b4a56e\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.922189 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8gtg\" (UniqueName: \"kubernetes.io/projected/05691e86-e66f-45a4-91cd-eb1045b4a56e-kube-api-access-q8gtg\") pod \"05691e86-e66f-45a4-91cd-eb1045b4a56e\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.922917 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-utilities\") pod \"05691e86-e66f-45a4-91cd-eb1045b4a56e\" (UID: \"05691e86-e66f-45a4-91cd-eb1045b4a56e\") " Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.926238 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-utilities" (OuterVolumeSpecName: "utilities") pod "05691e86-e66f-45a4-91cd-eb1045b4a56e" (UID: "05691e86-e66f-45a4-91cd-eb1045b4a56e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.929980 4685 scope.go:117] "RemoveContainer" containerID="1b3d05ab7cdf160764d09c38abbfdc98c855564eed358a6cbea9a0497c45edda" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.930149 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.930182 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.930196 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qtk7\" (UniqueName: \"kubernetes.io/projected/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-kube-api-access-2qtk7\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.930211 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6418701b-a844-4b1c-8b5e-07e1b8a3faf6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.930941 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05691e86-e66f-45a4-91cd-eb1045b4a56e-kube-api-access-q8gtg" (OuterVolumeSpecName: "kube-api-access-q8gtg") pod "05691e86-e66f-45a4-91cd-eb1045b4a56e" (UID: "05691e86-e66f-45a4-91cd-eb1045b4a56e"). InnerVolumeSpecName "kube-api-access-q8gtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:04:44 crc kubenswrapper[4685]: I1202 10:04:44.948341 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "05691e86-e66f-45a4-91cd-eb1045b4a56e" (UID: "05691e86-e66f-45a4-91cd-eb1045b4a56e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.010419 4685 scope.go:117] "RemoveContainer" containerID="63a7e0451c8ea64bf36cc3ce34c10434b697b1428cd739296447e7cccd625347" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.024057 4685 scope.go:117] "RemoveContainer" containerID="115fae1201bf7b97942cf4476480f2dc7dea036d92368b63640550eac7c33b5a" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.031665 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-utilities\") pod \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.031723 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-catalog-content\") pod \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.031757 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9znt\" (UniqueName: \"kubernetes.io/projected/780a97cc-9fca-4b67-a956-8be8a6eb3d08-kube-api-access-p9znt\") pod \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\" (UID: \"780a97cc-9fca-4b67-a956-8be8a6eb3d08\") " Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.032093 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05691e86-e66f-45a4-91cd-eb1045b4a56e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.032107 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8gtg\" (UniqueName: \"kubernetes.io/projected/05691e86-e66f-45a4-91cd-eb1045b4a56e-kube-api-access-q8gtg\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.034422 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-utilities" (OuterVolumeSpecName: "utilities") pod "780a97cc-9fca-4b67-a956-8be8a6eb3d08" (UID: "780a97cc-9fca-4b67-a956-8be8a6eb3d08"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.045088 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/780a97cc-9fca-4b67-a956-8be8a6eb3d08-kube-api-access-p9znt" (OuterVolumeSpecName: "kube-api-access-p9znt") pod "780a97cc-9fca-4b67-a956-8be8a6eb3d08" (UID: "780a97cc-9fca-4b67-a956-8be8a6eb3d08"). InnerVolumeSpecName "kube-api-access-p9znt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.083008 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "780a97cc-9fca-4b67-a956-8be8a6eb3d08" (UID: "780a97cc-9fca-4b67-a956-8be8a6eb3d08"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.114675 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.133257 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.133290 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/780a97cc-9fca-4b67-a956-8be8a6eb3d08-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.133300 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9znt\" (UniqueName: \"kubernetes.io/projected/780a97cc-9fca-4b67-a956-8be8a6eb3d08-kube-api-access-p9znt\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.234617 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-var-lock\") pod \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.234724 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kube-api-access\") pod \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.234753 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-var-lock" (OuterVolumeSpecName: "var-lock") pod "659d9a5f-88ad-480f-88d4-24d9dda24b3e" (UID: "659d9a5f-88ad-480f-88d4-24d9dda24b3e"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.234781 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kubelet-dir\") pod \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\" (UID: \"659d9a5f-88ad-480f-88d4-24d9dda24b3e\") " Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.234891 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "659d9a5f-88ad-480f-88d4-24d9dda24b3e" (UID: "659d9a5f-88ad-480f-88d4-24d9dda24b3e"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.235414 4685 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-var-lock\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.235446 4685 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.240414 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "659d9a5f-88ad-480f-88d4-24d9dda24b3e" (UID: "659d9a5f-88ad-480f-88d4-24d9dda24b3e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.337019 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/659d9a5f-88ad-480f-88d4-24d9dda24b3e-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.899381 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.900653 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1" exitCode=0 Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.904148 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.907623 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-8vsrk_780a97cc-9fca-4b67-a956-8be8a6eb3d08/registry-server/0.log" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.909018 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8vsrk" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.914294 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"659d9a5f-88ad-480f-88d4-24d9dda24b3e","Type":"ContainerDied","Data":"6e73d2c799c314daa635f314568b2ce3d6920ace21053270694c49c935ee1497"} Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.914335 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e73d2c799c314daa635f314568b2ce3d6920ace21053270694c49c935ee1497" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.914350 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8vsrk" event={"ID":"780a97cc-9fca-4b67-a956-8be8a6eb3d08","Type":"ContainerDied","Data":"6f64821112ec03f11188b66586fee1b7b2a3a5fa246c4528d3ce8f6bc3ebe2ef"} Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.914371 4685 scope.go:117] "RemoveContainer" containerID="ad564476d6e8c96dd4406eeee6475114866ad9a8a1cba1c9ff8cc170581be9e1" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.938553 4685 scope.go:117] "RemoveContainer" containerID="202b984a281911f12300d840d7cb32b23e2c2a6394f6027c3af032b0fa054626" Dec 02 10:04:45 crc kubenswrapper[4685]: I1202 10:04:45.957329 4685 scope.go:117] "RemoveContainer" containerID="9d0c6151479033c121164abfa2b0c3e31fcf7ac291cd78d6ffd9c1bf937735d5" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.493823 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.495117 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.553292 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.553605 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.553704 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.553396 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.553677 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.553793 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.554098 4685 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.554175 4685 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.554239 4685 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.920994 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.922552 4685 scope.go:117] "RemoveContainer" containerID="06371aca8f40edf6b9cfe4be2ac99fae9780a6e0a893952e0187b5b236ee1243" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.922657 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.938589 4685 scope.go:117] "RemoveContainer" containerID="ab5664905d718b8c0936fc2c4a22aec35d6e2fc19b23b2642db8a9eb96cd8536" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.953331 4685 scope.go:117] "RemoveContainer" containerID="994c977139449142f019154f10c3514caa0d38eed623e2497ce39a2ed09797cd" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.967202 4685 scope.go:117] "RemoveContainer" containerID="d9575b8e057f09f84a39036ff2423fee4d26c568f243d9841b935e79cc005a5b" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.984475 4685 scope.go:117] "RemoveContainer" containerID="f06e05f0e4395bb48c3b9434b03a2e3f2d7d0bc4c20c68c69559120420d904a1" Dec 02 10:04:46 crc kubenswrapper[4685]: I1202 10:04:46.998937 4685 scope.go:117] "RemoveContainer" containerID="fff01954e2f3f279c07d9fe6b6192918b5ecba525ceec4609cab3876bec0e918" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.638110 4685 status_manager.go:851] "Failed to get status for pod" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" pod="openshift-marketplace/redhat-marketplace-xd9bz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xd9bz\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: E1202 10:04:47.638109 4685 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.150:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-marketplace-xd9bz.187d5de89c4f9033 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-marketplace-xd9bz,UID:05691e86-e66f-45a4-91cd-eb1045b4a56e,APIVersion:v1,ResourceVersion:28419,FieldPath:spec.containers{registry-server},},Reason:Killing,Message:Stopping container registry-server,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 10:04:42.635358259 +0000 UTC m=+175.007132413,LastTimestamp:2025-12-02 10:04:42.635358259 +0000 UTC m=+175.007132413,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.638903 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.639926 4685 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.640315 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.640955 4685 status_manager.go:851] "Failed to get status for pod" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" pod="openshift-marketplace/community-operators-8vsrk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8vsrk\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.641731 4685 status_manager.go:851] "Failed to get status for pod" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" pod="openshift-marketplace/redhat-marketplace-xd9bz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xd9bz\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.642352 4685 status_manager.go:851] "Failed to get status for pod" podUID="659d9a5f-88ad-480f-88d4-24d9dda24b3e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.642762 4685 status_manager.go:851] "Failed to get status for pod" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" pod="openshift-marketplace/certified-operators-p94sb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-p94sb\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.901996 4685 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.902255 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.902495 4685 status_manager.go:851] "Failed to get status for pod" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" pod="openshift-marketplace/community-operators-8vsrk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8vsrk\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.903078 4685 status_manager.go:851] "Failed to get status for pod" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" pod="openshift-marketplace/redhat-marketplace-xd9bz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xd9bz\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.903585 4685 status_manager.go:851] "Failed to get status for pod" podUID="659d9a5f-88ad-480f-88d4-24d9dda24b3e" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.904034 4685 status_manager.go:851] "Failed to get status for pod" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" pod="openshift-marketplace/certified-operators-p94sb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-p94sb\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:47 crc kubenswrapper[4685]: I1202 10:04:47.906254 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 02 10:04:49 crc kubenswrapper[4685]: E1202 10:04:49.569823 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:04:49Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:04:49Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:04:49Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-02T10:04:49Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:49 crc kubenswrapper[4685]: E1202 10:04:49.570429 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:49 crc kubenswrapper[4685]: E1202 10:04:49.570788 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:49 crc kubenswrapper[4685]: E1202 10:04:49.570952 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:49 crc kubenswrapper[4685]: E1202 10:04:49.571086 4685 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:49 crc kubenswrapper[4685]: E1202 10:04:49.571098 4685 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 02 10:04:50 crc kubenswrapper[4685]: E1202 10:04:50.529774 4685 
event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.150:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-marketplace-xd9bz.187d5de89c4f9033 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-marketplace-xd9bz,UID:05691e86-e66f-45a4-91cd-eb1045b4a56e,APIVersion:v1,ResourceVersion:28419,FieldPath:spec.containers{registry-server},},Reason:Killing,Message:Stopping container registry-server,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-02 10:04:42.635358259 +0000 UTC m=+175.007132413,LastTimestamp:2025-12-02 10:04:42.635358259 +0000 UTC m=+175.007132413,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 02 10:04:50 crc kubenswrapper[4685]: I1202 10:04:50.898890 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:50 crc kubenswrapper[4685]: I1202 10:04:50.900143 4685 status_manager.go:851] "Failed to get status for pod" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" pod="openshift-marketplace/community-operators-8vsrk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8vsrk\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:50 crc kubenswrapper[4685]: I1202 10:04:50.900752 4685 status_manager.go:851] "Failed to get status for pod" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" pod="openshift-marketplace/redhat-marketplace-xd9bz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xd9bz\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:50 crc kubenswrapper[4685]: I1202 10:04:50.901061 4685 status_manager.go:851] "Failed to get status for pod" podUID="659d9a5f-88ad-480f-88d4-24d9dda24b3e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:50 crc kubenswrapper[4685]: I1202 10:04:50.901325 4685 status_manager.go:851] "Failed to get status for pod" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" pod="openshift-marketplace/certified-operators-p94sb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-p94sb\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:50 crc kubenswrapper[4685]: I1202 10:04:50.901631 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:50 crc kubenswrapper[4685]: I1202 10:04:50.912244 4685 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:50 crc kubenswrapper[4685]: I1202 10:04:50.912280 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:50 crc kubenswrapper[4685]: E1202 10:04:50.912778 4685 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:50 crc kubenswrapper[4685]: I1202 10:04:50.913225 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:50 crc kubenswrapper[4685]: W1202 10:04:50.932144 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-67a3b71eac7339235bb93897ede848afa9a413f9e6b0b87ca0cf6268ef1c19d1 WatchSource:0}: Error finding container 67a3b71eac7339235bb93897ede848afa9a413f9e6b0b87ca0cf6268ef1c19d1: Status 404 returned error can't find the container with id 67a3b71eac7339235bb93897ede848afa9a413f9e6b0b87ca0cf6268ef1c19d1 Dec 02 10:04:50 crc kubenswrapper[4685]: I1202 10:04:50.941723 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"67a3b71eac7339235bb93897ede848afa9a413f9e6b0b87ca0cf6268ef1c19d1"} Dec 02 10:04:51 crc kubenswrapper[4685]: I1202 10:04:51.951113 4685 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="b0607bd1ace4c94c42cfb53bb0d4968bbc1b80eee54cd49e71f4c07a4c2a4863" exitCode=0 Dec 02 10:04:51 crc kubenswrapper[4685]: I1202 10:04:51.951407 4685 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:51 crc kubenswrapper[4685]: I1202 10:04:51.951206 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"b0607bd1ace4c94c42cfb53bb0d4968bbc1b80eee54cd49e71f4c07a4c2a4863"} Dec 02 10:04:51 crc kubenswrapper[4685]: I1202 10:04:51.951424 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:51 crc kubenswrapper[4685]: E1202 10:04:51.951871 4685 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:51 crc kubenswrapper[4685]: I1202 10:04:51.951988 4685 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:51 crc kubenswrapper[4685]: I1202 10:04:51.952514 4685 status_manager.go:851] "Failed to get status for pod" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" pod="openshift-marketplace/community-operators-8vsrk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8vsrk\": dial tcp 38.102.83.150:6443: connect: 
connection refused" Dec 02 10:04:51 crc kubenswrapper[4685]: I1202 10:04:51.952909 4685 status_manager.go:851] "Failed to get status for pod" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" pod="openshift-marketplace/redhat-marketplace-xd9bz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-xd9bz\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:51 crc kubenswrapper[4685]: I1202 10:04:51.953310 4685 status_manager.go:851] "Failed to get status for pod" podUID="659d9a5f-88ad-480f-88d4-24d9dda24b3e" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:51 crc kubenswrapper[4685]: I1202 10:04:51.953653 4685 status_manager.go:851] "Failed to get status for pod" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" pod="openshift-marketplace/certified-operators-p94sb" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-p94sb\": dial tcp 38.102.83.150:6443: connect: connection refused" Dec 02 10:04:52 crc kubenswrapper[4685]: I1202 10:04:52.960265 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"aa87fd77e36f743d1540b1537be74633308983f97f7b672f1ca163d0c0fecf96"} Dec 02 10:04:52 crc kubenswrapper[4685]: I1202 10:04:52.960520 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"97253c8398c202379f6bb54f009f814bbd02f76a951fc438dfcf6cfa665addce"} Dec 02 10:04:53 crc kubenswrapper[4685]: I1202 10:04:53.973723 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1dd487eecadb330a22f6e94b068ecca43e836e3fbe9833021a005c0e19f6851c"} Dec 02 10:04:53 crc kubenswrapper[4685]: I1202 10:04:53.973776 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"748f5534049d1866171250dbe916a7a0f012d4a883c95ece8ce815671cfe1875"} Dec 02 10:04:54 crc kubenswrapper[4685]: I1202 10:04:54.981048 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"be714b5b1b0f1356e92e2f2501fa6bf5cd27990d6725303a053a55a3ea4d053c"} Dec 02 10:04:54 crc kubenswrapper[4685]: I1202 10:04:54.981190 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:54 crc kubenswrapper[4685]: I1202 10:04:54.981291 4685 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:54 crc kubenswrapper[4685]: I1202 10:04:54.981308 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:54 crc kubenswrapper[4685]: I1202 10:04:54.988355 4685 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" 
Dec 02 10:04:55 crc kubenswrapper[4685]: I1202 10:04:55.913595 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:55 crc kubenswrapper[4685]: I1202 10:04:55.914171 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:55 crc kubenswrapper[4685]: I1202 10:04:55.921633 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:55 crc kubenswrapper[4685]: I1202 10:04:55.989147 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 10:04:55 crc kubenswrapper[4685]: I1202 10:04:55.989197 4685 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee" exitCode=1 Dec 02 10:04:55 crc kubenswrapper[4685]: I1202 10:04:55.989261 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee"} Dec 02 10:04:55 crc kubenswrapper[4685]: I1202 10:04:55.989572 4685 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:55 crc kubenswrapper[4685]: I1202 10:04:55.989588 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:55 crc kubenswrapper[4685]: I1202 10:04:55.990131 4685 scope.go:117] "RemoveContainer" containerID="96b859036c2b60de2b901845528019c654caeceb03a8a82d3572ae445e5ebeee" Dec 02 10:04:55 crc kubenswrapper[4685]: I1202 10:04:55.999834 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:04:56 crc kubenswrapper[4685]: I1202 10:04:56.996302 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 02 10:04:56 crc kubenswrapper[4685]: I1202 10:04:56.996370 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"18ec97cb400d4747a181945bea8e5d2681bfcd038d7ac3d6e8d9b5b1437468e6"} Dec 02 10:04:56 crc kubenswrapper[4685]: I1202 10:04:56.996616 4685 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:56 crc kubenswrapper[4685]: I1202 10:04:56.996630 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:57 crc kubenswrapper[4685]: I1202 10:04:57.917904 4685 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fb4744ac-f4b8-4d9f-8ab5-fcda48507727" Dec 02 10:04:58 crc kubenswrapper[4685]: I1202 10:04:58.000463 4685 kubelet.go:1909] 
"Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:58 crc kubenswrapper[4685]: I1202 10:04:58.000493 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="6403fe5e-b48b-47ed-ad54-0c1a89a58899" Dec 02 10:04:58 crc kubenswrapper[4685]: I1202 10:04:58.009000 4685 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fb4744ac-f4b8-4d9f-8ab5-fcda48507727" Dec 02 10:04:59 crc kubenswrapper[4685]: I1202 10:04:59.284461 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:05:03 crc kubenswrapper[4685]: I1202 10:05:03.461314 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 02 10:05:03 crc kubenswrapper[4685]: I1202 10:05:03.530787 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:05:03 crc kubenswrapper[4685]: I1202 10:05:03.531064 4685 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 02 10:05:03 crc kubenswrapper[4685]: I1202 10:05:03.531100 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 02 10:05:04 crc kubenswrapper[4685]: I1202 10:05:04.065063 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 02 10:05:04 crc kubenswrapper[4685]: I1202 10:05:04.153326 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 02 10:05:04 crc kubenswrapper[4685]: I1202 10:05:04.552600 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 02 10:05:04 crc kubenswrapper[4685]: I1202 10:05:04.617184 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.094883 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.147527 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.231829 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.246893 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.277076 4685 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.417296 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.454597 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.475330 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.550244 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.567699 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.571983 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.573416 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.600445 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.705171 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.808478 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.857545 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.988535 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 02 10:05:05 crc kubenswrapper[4685]: I1202 10:05:05.990324 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.003690 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.048678 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.060157 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.070038 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.193184 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.300615 4685 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.314110 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.331860 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.371145 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.417987 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.424542 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.427858 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.533807 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.559254 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.634858 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.661315 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.703627 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.723193 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.734439 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.735987 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.771873 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.774945 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.880243 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.886338 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 02 10:05:06 crc 
kubenswrapper[4685]: I1202 10:05:06.928505 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.933788 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.945258 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.947692 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.967248 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.995807 4685 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 02 10:05:06 crc kubenswrapper[4685]: I1202 10:05:06.999084 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=24.999062226 podStartE2EDuration="24.999062226s" podCreationTimestamp="2025-12-02 10:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:04:55.006847974 +0000 UTC m=+187.378622128" watchObservedRunningTime="2025-12-02 10:05:06.999062226 +0000 UTC m=+199.370836380" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.001637 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xd9bz","openshift-marketplace/community-operators-8vsrk","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/certified-operators-p94sb"] Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.001721 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.002697 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.005792 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.006448 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.025329 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=13.025303856 podStartE2EDuration="13.025303856s" podCreationTimestamp="2025-12-02 10:04:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:05:07.019949897 +0000 UTC m=+199.391724091" watchObservedRunningTime="2025-12-02 10:05:07.025303856 +0000 UTC m=+199.397078050" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.032500 4685 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 02 10:05:07 crc 
kubenswrapper[4685]: I1202 10:05:07.049723 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.054576 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.078550 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.090412 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.104545 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.109624 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.123784 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.157551 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.197434 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.209687 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.210672 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.216496 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.254308 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.270282 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.283641 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.333883 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.343259 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.455171 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.459552 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 
02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.465636 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.484730 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.570109 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.689484 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.692154 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.710927 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.710964 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.716491 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.761859 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.762263 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.765973 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.775708 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.900822 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.905845 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" path="/var/lib/kubelet/pods/05691e86-e66f-45a4-91cd-eb1045b4a56e/volumes" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.906423 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" path="/var/lib/kubelet/pods/6418701b-a844-4b1c-8b5e-07e1b8a3faf6/volumes" Dec 02 10:05:07 crc kubenswrapper[4685]: I1202 10:05:07.906968 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" path="/var/lib/kubelet/pods/780a97cc-9fca-4b67-a956-8be8a6eb3d08/volumes" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.034122 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.063847 4685 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.063847 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.158838 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.162473 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.277636 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.285202 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.304698 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.323844 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.327916 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.349703 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.476307 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.515630 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.545540 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.650974 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.664600 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.689322 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.702889 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.746979 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.748803 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.776910 4685 kubelet.go:2431] "SyncLoop 
REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.777139 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://8b7fd2a0913512c609c8065c457e1f893021919e27d8b2306f4b1ac37cb4a22e" gracePeriod=5 Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.847998 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.913192 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.917800 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.977824 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 02 10:05:08 crc kubenswrapper[4685]: I1202 10:05:08.981503 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.050303 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.050773 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.070321 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.081132 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.082592 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.092710 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.112048 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.119653 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.129401 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.176541 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.184111 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.281933 4685 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.282312 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.350475 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.414893 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.437300 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.505025 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.561375 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.578682 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.590006 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.637055 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.661923 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.668174 4685 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.670173 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.705981 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.728020 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.767047 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.773492 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.774587 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.782071 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.807418 4685 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.857644 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 02 10:05:09 crc kubenswrapper[4685]: I1202 10:05:09.876860 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.008548 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.059926 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.078588 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.081984 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.082776 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.098194 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.147823 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.154444 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.159465 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.200658 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.235825 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.259584 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.287406 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.313396 4685 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.325246 4685 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.345378 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.363130 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 02 10:05:10 crc 
kubenswrapper[4685]: I1202 10:05:10.455686 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.465487 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.529758 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.735738 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.742430 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.775865 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.802009 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.923891 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.925269 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 02 10:05:10 crc kubenswrapper[4685]: I1202 10:05:10.979799 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.043262 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.072208 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.141731 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.145449 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.185493 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.258204 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.267843 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.270901 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.280203 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 
10:05:11.282416 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.338105 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.398753 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.499427 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.568980 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.586632 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.619686 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.671728 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.750125 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.832960 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 02 10:05:11 crc kubenswrapper[4685]: I1202 10:05:11.980860 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.107199 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.115809 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.133669 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.147821 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.147891 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.147942 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:05:12 crc kubenswrapper[4685]: 
I1202 10:05:12.148491 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"08e04cf99fa3ee1491747d78ebb85fb820355da3a86817cc7bdd5250562fc29e"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.148603 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://08e04cf99fa3ee1491747d78ebb85fb820355da3a86817cc7bdd5250562fc29e" gracePeriod=600 Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.173892 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.216479 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.294423 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 02 10:05:12 crc kubenswrapper[4685]: I1202 10:05:12.394354 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 02 10:05:13 crc kubenswrapper[4685]: I1202 10:05:13.075901 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="08e04cf99fa3ee1491747d78ebb85fb820355da3a86817cc7bdd5250562fc29e" exitCode=0 Dec 02 10:05:13 crc kubenswrapper[4685]: I1202 10:05:13.075947 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"08e04cf99fa3ee1491747d78ebb85fb820355da3a86817cc7bdd5250562fc29e"} Dec 02 10:05:13 crc kubenswrapper[4685]: I1202 10:05:13.075978 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"0071a28d3e21c488be2b1d07993acd33dbd116d8424d85af9c0eda9381b4c6d4"} Dec 02 10:05:13 crc kubenswrapper[4685]: I1202 10:05:13.535909 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:05:13 crc kubenswrapper[4685]: I1202 10:05:13.542478 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.083708 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.083765 4685 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="8b7fd2a0913512c609c8065c457e1f893021919e27d8b2306f4b1ac37cb4a22e" exitCode=137 Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.346878 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.347015 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.528259 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.541296 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.541373 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.541408 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.541428 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.541490 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.541550 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.541623 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.541796 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.541842 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.542018 4685 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.542042 4685 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.542051 4685 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.542061 4685 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.549440 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.550945 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.605418 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 02 10:05:14 crc kubenswrapper[4685]: I1202 10:05:14.642784 4685 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.089792 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.089849 4685 scope.go:117] "RemoveContainer" containerID="8b7fd2a0913512c609c8065c457e1f893021919e27d8b2306f4b1ac37cb4a22e" Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.089888 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.257917 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.694545 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.906903 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.907150 4685 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.917745 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.917801 4685 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="663cc8ce-2a96-4bb1-b6ce-1d73f8a41e93" Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.921144 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 02 10:05:15 crc kubenswrapper[4685]: I1202 10:05:15.921194 4685 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="663cc8ce-2a96-4bb1-b6ce-1d73f8a41e93" Dec 02 10:05:16 crc kubenswrapper[4685]: I1202 10:05:16.557935 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 02 10:05:16 crc kubenswrapper[4685]: I1202 10:05:16.659243 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 02 10:05:16 crc kubenswrapper[4685]: I1202 10:05:16.957017 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 02 10:05:17 crc kubenswrapper[4685]: I1202 10:05:17.372211 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 10:05:17 crc kubenswrapper[4685]: I1202 10:05:17.546482 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 02 10:05:17 crc kubenswrapper[4685]: I1202 10:05:17.556186 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 02 10:05:17 crc kubenswrapper[4685]: I1202 10:05:17.870140 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 02 10:05:17 crc kubenswrapper[4685]: I1202 10:05:17.931883 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 02 10:05:18 crc kubenswrapper[4685]: I1202 10:05:18.216831 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 02 10:05:18 crc kubenswrapper[4685]: I1202 
10:05:18.409758 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 02 10:05:18 crc kubenswrapper[4685]: I1202 10:05:18.567299 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 02 10:05:18 crc kubenswrapper[4685]: I1202 10:05:18.725691 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 02 10:05:18 crc kubenswrapper[4685]: I1202 10:05:18.855910 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 02 10:05:19 crc kubenswrapper[4685]: I1202 10:05:19.009535 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 02 10:05:19 crc kubenswrapper[4685]: I1202 10:05:19.103083 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 02 10:05:19 crc kubenswrapper[4685]: I1202 10:05:19.179986 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 02 10:05:19 crc kubenswrapper[4685]: I1202 10:05:19.689998 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 02 10:05:19 crc kubenswrapper[4685]: I1202 10:05:19.794086 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 02 10:05:20 crc kubenswrapper[4685]: I1202 10:05:20.252177 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 02 10:05:20 crc kubenswrapper[4685]: I1202 10:05:20.384854 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 02 10:05:20 crc kubenswrapper[4685]: I1202 10:05:20.508710 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 02 10:05:20 crc kubenswrapper[4685]: I1202 10:05:20.527006 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 02 10:05:20 crc kubenswrapper[4685]: I1202 10:05:20.851295 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 02 10:05:20 crc kubenswrapper[4685]: I1202 10:05:20.855466 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 02 10:05:21 crc kubenswrapper[4685]: I1202 10:05:21.067657 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 02 10:05:21 crc kubenswrapper[4685]: I1202 10:05:21.489730 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 02 10:05:21 crc kubenswrapper[4685]: I1202 10:05:21.494392 4685 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 02 10:05:21 crc kubenswrapper[4685]: I1202 10:05:21.648721 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 02 10:05:22 crc kubenswrapper[4685]: I1202 10:05:22.164718 4685 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 02 10:05:22 crc kubenswrapper[4685]: I1202 10:05:22.226080 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 02 10:05:22 crc kubenswrapper[4685]: I1202 10:05:22.423275 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 02 10:05:22 crc kubenswrapper[4685]: I1202 10:05:22.599021 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 02 10:05:22 crc kubenswrapper[4685]: I1202 10:05:22.626093 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 02 10:05:22 crc kubenswrapper[4685]: I1202 10:05:22.849880 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 02 10:05:23 crc kubenswrapper[4685]: I1202 10:05:23.387372 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 02 10:05:23 crc kubenswrapper[4685]: I1202 10:05:23.870203 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 02 10:05:24 crc kubenswrapper[4685]: I1202 10:05:24.138621 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 02 10:05:24 crc kubenswrapper[4685]: I1202 10:05:24.474750 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 02 10:05:24 crc kubenswrapper[4685]: I1202 10:05:24.528638 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 02 10:05:25 crc kubenswrapper[4685]: I1202 10:05:25.124600 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 02 10:05:25 crc kubenswrapper[4685]: I1202 10:05:25.406402 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 02 10:05:25 crc kubenswrapper[4685]: I1202 10:05:25.543127 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 02 10:05:25 crc kubenswrapper[4685]: I1202 10:05:25.581639 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 02 10:05:26 crc kubenswrapper[4685]: I1202 10:05:26.137643 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 02 10:05:26 crc kubenswrapper[4685]: I1202 10:05:26.181365 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 02 10:05:27 crc kubenswrapper[4685]: I1202 10:05:27.788249 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.333470 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 02 10:05:28 crc kubenswrapper[4685]: 
I1202 10:05:28.865343 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-96v4q"] Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.865591 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-96v4q" podUID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerName="registry-server" containerID="cri-o://7fafb5fc66f6d4c7e4cb01d456fc8cca0c89d5873f063ab8f9768c5fb0a06afb" gracePeriod=30 Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.869054 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6kndg"] Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.869349 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6kndg" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerName="registry-server" containerID="cri-o://eebeb371f7ae7a168d366e355ea77238c52e44cadd08024b3265ff448f5c3da4" gracePeriod=30 Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.877233 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xf986"] Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.877654 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" podUID="370d38c8-640e-44bd-a095-75fb04be6320" containerName="marketplace-operator" containerID="cri-o://fc3a9529c19e07b8d80cc79c192cd0d2e55ee4a14930a3ee7094eb774549da27" gracePeriod=30 Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.887701 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zj8bb"] Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.887936 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zj8bb" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerName="registry-server" containerID="cri-o://76a821d0d0452aa117685020a16ac06a6e01ecd929d247493512ae3d5af2dfda" gracePeriod=30 Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.903948 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lf7qs"] Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.904152 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lf7qs" podUID="940db863-41cb-461b-ab9f-80663af52acf" containerName="registry-server" containerID="cri-o://cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f" gracePeriod=30 Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.910174 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rp5qc"] Dec 02 10:05:28 crc kubenswrapper[4685]: I1202 10:05:28.910373 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rp5qc" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerName="registry-server" containerID="cri-o://a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4" gracePeriod=30 Dec 02 10:05:29 crc kubenswrapper[4685]: E1202 10:05:29.182823 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f is running failed: container 
process not found" containerID="cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 10:05:29 crc kubenswrapper[4685]: E1202 10:05:29.183302 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f is running failed: container process not found" containerID="cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 10:05:29 crc kubenswrapper[4685]: E1202 10:05:29.183654 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f is running failed: container process not found" containerID="cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 10:05:29 crc kubenswrapper[4685]: E1202 10:05:29.183687 4685 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-lf7qs" podUID="940db863-41cb-461b-ab9f-80663af52acf" containerName="registry-server" Dec 02 10:05:29 crc kubenswrapper[4685]: E1202 10:05:29.545135 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4 is running failed: container process not found" containerID="a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 10:05:29 crc kubenswrapper[4685]: E1202 10:05:29.545605 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4 is running failed: container process not found" containerID="a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 10:05:29 crc kubenswrapper[4685]: E1202 10:05:29.545815 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4 is running failed: container process not found" containerID="a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 10:05:29 crc kubenswrapper[4685]: E1202 10:05:29.545851 4685 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-rp5qc" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerName="registry-server" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.175605 4685 generic.go:334] "Generic (PLEG): container finished" podID="940db863-41cb-461b-ab9f-80663af52acf" containerID="cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f" 
exitCode=0 Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.175683 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf7qs" event={"ID":"940db863-41cb-461b-ab9f-80663af52acf","Type":"ContainerDied","Data":"cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f"} Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.178039 4685 generic.go:334] "Generic (PLEG): container finished" podID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerID="eebeb371f7ae7a168d366e355ea77238c52e44cadd08024b3265ff448f5c3da4" exitCode=0 Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.178094 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kndg" event={"ID":"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017","Type":"ContainerDied","Data":"eebeb371f7ae7a168d366e355ea77238c52e44cadd08024b3265ff448f5c3da4"} Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.180292 4685 generic.go:334] "Generic (PLEG): container finished" podID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerID="a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4" exitCode=0 Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.180349 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rp5qc" event={"ID":"9ea11cb0-f32a-4346-b195-ce6c7176553b","Type":"ContainerDied","Data":"a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4"} Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.183028 4685 generic.go:334] "Generic (PLEG): container finished" podID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerID="76a821d0d0452aa117685020a16ac06a6e01ecd929d247493512ae3d5af2dfda" exitCode=0 Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.183083 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj8bb" event={"ID":"4eddbb2b-7286-4117-97fd-f915638d19cd","Type":"ContainerDied","Data":"76a821d0d0452aa117685020a16ac06a6e01ecd929d247493512ae3d5af2dfda"} Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.186932 4685 generic.go:334] "Generic (PLEG): container finished" podID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerID="7fafb5fc66f6d4c7e4cb01d456fc8cca0c89d5873f063ab8f9768c5fb0a06afb" exitCode=0 Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.187011 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96v4q" event={"ID":"2c40690c-d875-4eef-9c0c-0a174aa43ca8","Type":"ContainerDied","Data":"7fafb5fc66f6d4c7e4cb01d456fc8cca0c89d5873f063ab8f9768c5fb0a06afb"} Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.188771 4685 generic.go:334] "Generic (PLEG): container finished" podID="370d38c8-640e-44bd-a095-75fb04be6320" containerID="fc3a9529c19e07b8d80cc79c192cd0d2e55ee4a14930a3ee7094eb774549da27" exitCode=0 Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.188803 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" event={"ID":"370d38c8-640e-44bd-a095-75fb04be6320","Type":"ContainerDied","Data":"fc3a9529c19e07b8d80cc79c192cd0d2e55ee4a14930a3ee7094eb774549da27"} Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.506166 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.553902 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-catalog-content\") pod \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.553956 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-utilities\") pod \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.553997 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5l4l\" (UniqueName: \"kubernetes.io/projected/2c40690c-d875-4eef-9c0c-0a174aa43ca8-kube-api-access-z5l4l\") pod \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\" (UID: \"2c40690c-d875-4eef-9c0c-0a174aa43ca8\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.557580 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-utilities" (OuterVolumeSpecName: "utilities") pod "2c40690c-d875-4eef-9c0c-0a174aa43ca8" (UID: "2c40690c-d875-4eef-9c0c-0a174aa43ca8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.569861 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c40690c-d875-4eef-9c0c-0a174aa43ca8-kube-api-access-z5l4l" (OuterVolumeSpecName: "kube-api-access-z5l4l") pod "2c40690c-d875-4eef-9c0c-0a174aa43ca8" (UID: "2c40690c-d875-4eef-9c0c-0a174aa43ca8"). InnerVolumeSpecName "kube-api-access-z5l4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.656143 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.657059 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5l4l\" (UniqueName: \"kubernetes.io/projected/2c40690c-d875-4eef-9c0c-0a174aa43ca8-kube-api-access-z5l4l\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.662006 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c40690c-d875-4eef-9c0c-0a174aa43ca8" (UID: "2c40690c-d875-4eef-9c0c-0a174aa43ca8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.670302 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.673738 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.682309 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.692489 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.700655 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.757942 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bznjs\" (UniqueName: \"kubernetes.io/projected/9ea11cb0-f32a-4346-b195-ce6c7176553b-kube-api-access-bznjs\") pod \"9ea11cb0-f32a-4346-b195-ce6c7176553b\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.757983 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z44ms\" (UniqueName: \"kubernetes.io/projected/370d38c8-640e-44bd-a095-75fb04be6320-kube-api-access-z44ms\") pod \"370d38c8-640e-44bd-a095-75fb04be6320\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758014 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-catalog-content\") pod \"4eddbb2b-7286-4117-97fd-f915638d19cd\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758037 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-catalog-content\") pod \"9ea11cb0-f32a-4346-b195-ce6c7176553b\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758058 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-utilities\") pod \"940db863-41cb-461b-ab9f-80663af52acf\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758076 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-utilities\") pod \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758136 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsk4t\" (UniqueName: \"kubernetes.io/projected/4eddbb2b-7286-4117-97fd-f915638d19cd-kube-api-access-hsk4t\") pod \"4eddbb2b-7286-4117-97fd-f915638d19cd\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758172 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-utilities\") pod \"9ea11cb0-f32a-4346-b195-ce6c7176553b\" (UID: \"9ea11cb0-f32a-4346-b195-ce6c7176553b\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758201 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-utilities\") pod \"4eddbb2b-7286-4117-97fd-f915638d19cd\" (UID: \"4eddbb2b-7286-4117-97fd-f915638d19cd\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758220 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-catalog-content\") pod \"940db863-41cb-461b-ab9f-80663af52acf\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758240 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-operator-metrics\") pod \"370d38c8-640e-44bd-a095-75fb04be6320\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758255 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-catalog-content\") pod \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758277 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4pwf\" (UniqueName: \"kubernetes.io/projected/940db863-41cb-461b-ab9f-80663af52acf-kube-api-access-c4pwf\") pod \"940db863-41cb-461b-ab9f-80663af52acf\" (UID: \"940db863-41cb-461b-ab9f-80663af52acf\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758295 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-trusted-ca\") pod \"370d38c8-640e-44bd-a095-75fb04be6320\" (UID: \"370d38c8-640e-44bd-a095-75fb04be6320\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758323 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dvbl\" (UniqueName: \"kubernetes.io/projected/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-kube-api-access-4dvbl\") pod \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\" (UID: \"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017\") " Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.758497 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c40690c-d875-4eef-9c0c-0a174aa43ca8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.760738 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-utilities" (OuterVolumeSpecName: "utilities") pod "4eddbb2b-7286-4117-97fd-f915638d19cd" (UID: "4eddbb2b-7286-4117-97fd-f915638d19cd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.761546 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-utilities" (OuterVolumeSpecName: "utilities") pod "9ea11cb0-f32a-4346-b195-ce6c7176553b" (UID: "9ea11cb0-f32a-4346-b195-ce6c7176553b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.769677 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/940db863-41cb-461b-ab9f-80663af52acf-kube-api-access-c4pwf" (OuterVolumeSpecName: "kube-api-access-c4pwf") pod "940db863-41cb-461b-ab9f-80663af52acf" (UID: "940db863-41cb-461b-ab9f-80663af52acf"). InnerVolumeSpecName "kube-api-access-c4pwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.773458 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "370d38c8-640e-44bd-a095-75fb04be6320" (UID: "370d38c8-640e-44bd-a095-75fb04be6320"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.775800 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ea11cb0-f32a-4346-b195-ce6c7176553b-kube-api-access-bznjs" (OuterVolumeSpecName: "kube-api-access-bznjs") pod "9ea11cb0-f32a-4346-b195-ce6c7176553b" (UID: "9ea11cb0-f32a-4346-b195-ce6c7176553b"). InnerVolumeSpecName "kube-api-access-bznjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.775999 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/370d38c8-640e-44bd-a095-75fb04be6320-kube-api-access-z44ms" (OuterVolumeSpecName: "kube-api-access-z44ms") pod "370d38c8-640e-44bd-a095-75fb04be6320" (UID: "370d38c8-640e-44bd-a095-75fb04be6320"). InnerVolumeSpecName "kube-api-access-z44ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.777212 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-utilities" (OuterVolumeSpecName: "utilities") pod "37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" (UID: "37ef1a3f-a5f2-4ec7-b996-2c8bacbac017"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.778058 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "370d38c8-640e-44bd-a095-75fb04be6320" (UID: "370d38c8-640e-44bd-a095-75fb04be6320"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.780834 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-utilities" (OuterVolumeSpecName: "utilities") pod "940db863-41cb-461b-ab9f-80663af52acf" (UID: "940db863-41cb-461b-ab9f-80663af52acf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.795733 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eddbb2b-7286-4117-97fd-f915638d19cd-kube-api-access-hsk4t" (OuterVolumeSpecName: "kube-api-access-hsk4t") pod "4eddbb2b-7286-4117-97fd-f915638d19cd" (UID: "4eddbb2b-7286-4117-97fd-f915638d19cd"). InnerVolumeSpecName "kube-api-access-hsk4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.806971 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-kube-api-access-4dvbl" (OuterVolumeSpecName: "kube-api-access-4dvbl") pod "37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" (UID: "37ef1a3f-a5f2-4ec7-b996-2c8bacbac017"). InnerVolumeSpecName "kube-api-access-4dvbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.812287 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4eddbb2b-7286-4117-97fd-f915638d19cd" (UID: "4eddbb2b-7286-4117-97fd-f915638d19cd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.859888 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z44ms\" (UniqueName: \"kubernetes.io/projected/370d38c8-640e-44bd-a095-75fb04be6320-kube-api-access-z44ms\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.859932 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.859945 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.859961 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.859974 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsk4t\" (UniqueName: \"kubernetes.io/projected/4eddbb2b-7286-4117-97fd-f915638d19cd-kube-api-access-hsk4t\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.859988 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.859999 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4eddbb2b-7286-4117-97fd-f915638d19cd-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.860013 4685 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-operator-metrics\") 
on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.860025 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4pwf\" (UniqueName: \"kubernetes.io/projected/940db863-41cb-461b-ab9f-80663af52acf-kube-api-access-c4pwf\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.860038 4685 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/370d38c8-640e-44bd-a095-75fb04be6320-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.860049 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dvbl\" (UniqueName: \"kubernetes.io/projected/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-kube-api-access-4dvbl\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.860060 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bznjs\" (UniqueName: \"kubernetes.io/projected/9ea11cb0-f32a-4346-b195-ce6c7176553b-kube-api-access-bznjs\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.885485 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" (UID: "37ef1a3f-a5f2-4ec7-b996-2c8bacbac017"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.967201 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.981922 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "940db863-41cb-461b-ab9f-80663af52acf" (UID: "940db863-41cb-461b-ab9f-80663af52acf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:05:30 crc kubenswrapper[4685]: I1202 10:05:30.983678 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ea11cb0-f32a-4346-b195-ce6c7176553b" (UID: "9ea11cb0-f32a-4346-b195-ce6c7176553b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.068165 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/940db863-41cb-461b-ab9f-80663af52acf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.068209 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ea11cb0-f32a-4346-b195-ce6c7176553b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.196951 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rp5qc" event={"ID":"9ea11cb0-f32a-4346-b195-ce6c7176553b","Type":"ContainerDied","Data":"cb7a54c6352f56cc7adf10051a337e70ce3f999ba2f2144295de213f9cf0788d"} Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.197013 4685 scope.go:117] "RemoveContainer" containerID="a528be6d7e5e57c5eff8e00a08638c47e3d109b990fd36737e4df90477f5eeb4" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.197037 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rp5qc" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.200050 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zj8bb" event={"ID":"4eddbb2b-7286-4117-97fd-f915638d19cd","Type":"ContainerDied","Data":"6b435c3064fd0cf9692d5df5d2a3871aebac2752e08f64522c3dc35f40b8d8dd"} Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.200072 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zj8bb" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.202474 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hxbft"] Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203024 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="940db863-41cb-461b-ab9f-80663af52acf" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203041 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="940db863-41cb-461b-ab9f-80663af52acf" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.202612 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-96v4q" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203054 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203062 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203073 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370d38c8-640e-44bd-a095-75fb04be6320" containerName="marketplace-operator" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203080 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="370d38c8-640e-44bd-a095-75fb04be6320" containerName="marketplace-operator" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203091 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203098 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203107 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203113 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203124 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203131 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203138 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203144 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203155 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203161 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203174 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="940db863-41cb-461b-ab9f-80663af52acf" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203181 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="940db863-41cb-461b-ab9f-80663af52acf" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203192 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203202 4685 
state_mem.go:107] "Deleted CPUSet assignment" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203210 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203219 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203230 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203238 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203249 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203256 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203265 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203273 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203284 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203291 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerName="extract-utilities" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203302 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203309 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203318 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203327 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203337 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203345 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203354 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: 
I1202 10:05:31.203362 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203371 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="659d9a5f-88ad-480f-88d4-24d9dda24b3e" containerName="installer" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203379 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="659d9a5f-88ad-480f-88d4-24d9dda24b3e" containerName="installer" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203389 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203397 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203407 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203415 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203423 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203431 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203439 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203447 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203457 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203464 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerName="extract-content" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203475 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="940db863-41cb-461b-ab9f-80663af52acf" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203484 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="940db863-41cb-461b-ab9f-80663af52acf" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: E1202 10:05:31.203497 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203504 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203627 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 02 10:05:31 crc kubenswrapper[4685]: 
I1202 10:05:31.203639 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="659d9a5f-88ad-480f-88d4-24d9dda24b3e" containerName="installer" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203647 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="940db863-41cb-461b-ab9f-80663af52acf" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203654 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="6418701b-a844-4b1c-8b5e-07e1b8a3faf6" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203662 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203670 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203676 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203685 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203695 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="780a97cc-9fca-4b67-a956-8be8a6eb3d08" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203703 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="05691e86-e66f-45a4-91cd-eb1045b4a56e" containerName="registry-server" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.203711 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="370d38c8-640e-44bd-a095-75fb04be6320" containerName="marketplace-operator" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.204008 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-96v4q" event={"ID":"2c40690c-d875-4eef-9c0c-0a174aa43ca8","Type":"ContainerDied","Data":"d2ab7d04700ce9e186ee1d56614b7c2db072203ae041bf62f324a0446b34323a"} Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.204093 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.207182 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" event={"ID":"370d38c8-640e-44bd-a095-75fb04be6320","Type":"ContainerDied","Data":"7687bace00cdcf0bacaa4146bf764b6c13e0c987b28d5fd3771a12cdbc452b51"} Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.207277 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xf986" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.230587 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lf7qs" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.230583 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lf7qs" event={"ID":"940db863-41cb-461b-ab9f-80663af52acf","Type":"ContainerDied","Data":"5820c520e2e13fc5a3339073c0300ea46fad6ade6f219917350a0a997e7e0a52"} Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.234985 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hxbft"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.242898 4685 scope.go:117] "RemoveContainer" containerID="82660154c89fd907e9740908f8013dcd67410dcfd709719e58e41b446022ccf8" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.247133 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kndg" event={"ID":"37ef1a3f-a5f2-4ec7-b996-2c8bacbac017","Type":"ContainerDied","Data":"45be23419db0db4e27fd1b3cbcff5b1e08ac822306b5bb672f5daa412013646f"} Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.247225 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6kndg" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.281079 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhh86\" (UniqueName: \"kubernetes.io/projected/267f61a3-d674-4855-9f76-b5c7edb14ed1-kube-api-access-qhh86\") pod \"marketplace-operator-79b997595-hxbft\" (UID: \"267f61a3-d674-4855-9f76-b5c7edb14ed1\") " pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.281284 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/267f61a3-d674-4855-9f76-b5c7edb14ed1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hxbft\" (UID: \"267f61a3-d674-4855-9f76-b5c7edb14ed1\") " pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.281469 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/267f61a3-d674-4855-9f76-b5c7edb14ed1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hxbft\" (UID: \"267f61a3-d674-4855-9f76-b5c7edb14ed1\") " pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.282978 4685 scope.go:117] "RemoveContainer" containerID="7310837e9b85ff4292c2016b4986172fe6f1a52bc0587f113a16e6159adb31e7" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.338346 4685 scope.go:117] "RemoveContainer" containerID="76a821d0d0452aa117685020a16ac06a6e01ecd929d247493512ae3d5af2dfda" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.342180 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xf986"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.346477 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xf986"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.363025 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zj8bb"] Dec 02 10:05:31 
crc kubenswrapper[4685]: I1202 10:05:31.369265 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zj8bb"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.380928 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-96v4q"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.381454 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-96v4q"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.383956 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/267f61a3-d674-4855-9f76-b5c7edb14ed1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hxbft\" (UID: \"267f61a3-d674-4855-9f76-b5c7edb14ed1\") " pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.384012 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/267f61a3-d674-4855-9f76-b5c7edb14ed1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hxbft\" (UID: \"267f61a3-d674-4855-9f76-b5c7edb14ed1\") " pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.384050 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhh86\" (UniqueName: \"kubernetes.io/projected/267f61a3-d674-4855-9f76-b5c7edb14ed1-kube-api-access-qhh86\") pod \"marketplace-operator-79b997595-hxbft\" (UID: \"267f61a3-d674-4855-9f76-b5c7edb14ed1\") " pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.387573 4685 scope.go:117] "RemoveContainer" containerID="2cb1a63b6745bce9d58a44b23f96333fc1421a71b13a951f85748b050d32c409" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.387699 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/267f61a3-d674-4855-9f76-b5c7edb14ed1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hxbft\" (UID: \"267f61a3-d674-4855-9f76-b5c7edb14ed1\") " pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.395886 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/267f61a3-d674-4855-9f76-b5c7edb14ed1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hxbft\" (UID: \"267f61a3-d674-4855-9f76-b5c7edb14ed1\") " pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.411617 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rp5qc"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.414691 4685 scope.go:117] "RemoveContainer" containerID="e39a50d44849a14dd8a37945cb8cc0a9156180a7b5368e2d9f96a82c0218dd9b" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.418287 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rp5qc"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.435763 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lf7qs"] Dec 02 10:05:31 crc 
kubenswrapper[4685]: I1202 10:05:31.438547 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lf7qs"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.442737 4685 scope.go:117] "RemoveContainer" containerID="7fafb5fc66f6d4c7e4cb01d456fc8cca0c89d5873f063ab8f9768c5fb0a06afb" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.443436 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhh86\" (UniqueName: \"kubernetes.io/projected/267f61a3-d674-4855-9f76-b5c7edb14ed1-kube-api-access-qhh86\") pod \"marketplace-operator-79b997595-hxbft\" (UID: \"267f61a3-d674-4855-9f76-b5c7edb14ed1\") " pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.458625 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6kndg"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.461649 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6kndg"] Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.494763 4685 scope.go:117] "RemoveContainer" containerID="665dbadd27133dec8b742dc50b62319008ea8d3b7058d43c788f7d1c6e5b6d35" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.536954 4685 scope.go:117] "RemoveContainer" containerID="0ce8d400793a01179fed2da6b9dffc0fc6eb3e6ea798837bbf0965e82c6e335c" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.540553 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.575242 4685 scope.go:117] "RemoveContainer" containerID="fc3a9529c19e07b8d80cc79c192cd0d2e55ee4a14930a3ee7094eb774549da27" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.608824 4685 scope.go:117] "RemoveContainer" containerID="cecf69fe14d0ddc7807dee60ad5d7ccd041f0ba8d4e1a75fe4513fdd990d8c0f" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.626544 4685 scope.go:117] "RemoveContainer" containerID="7cd1bc73b08f81ea3ed3dea711bba64069b9aa4d06ca3d18fe71f3add56660e2" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.643929 4685 scope.go:117] "RemoveContainer" containerID="1d75db56749234660a77fb3dfc7a3419fc1d7565fcc53dc190bc1b154a9b9893" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.682994 4685 scope.go:117] "RemoveContainer" containerID="eebeb371f7ae7a168d366e355ea77238c52e44cadd08024b3265ff448f5c3da4" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.712271 4685 scope.go:117] "RemoveContainer" containerID="2588a03fb55fd3b939de3549a9b001845993e9f245c11c9028d2c9948fdd5203" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.739299 4685 scope.go:117] "RemoveContainer" containerID="87bebf18c72969c2a6510f94fe9950ee5d50af692f0fbf02f9cc037e8fbe7712" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.793122 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hxbft"] Dec 02 10:05:31 crc kubenswrapper[4685]: W1202 10:05:31.805941 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod267f61a3_d674_4855_9f76_b5c7edb14ed1.slice/crio-c4640409be2a27fc516c6fbed63664b851c6bda615be9760d0e86b63826c8c82 WatchSource:0}: Error finding container c4640409be2a27fc516c6fbed63664b851c6bda615be9760d0e86b63826c8c82: Status 404 returned error can't find the 
container with id c4640409be2a27fc516c6fbed63664b851c6bda615be9760d0e86b63826c8c82 Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.905603 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c40690c-d875-4eef-9c0c-0a174aa43ca8" path="/var/lib/kubelet/pods/2c40690c-d875-4eef-9c0c-0a174aa43ca8/volumes" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.906647 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="370d38c8-640e-44bd-a095-75fb04be6320" path="/var/lib/kubelet/pods/370d38c8-640e-44bd-a095-75fb04be6320/volumes" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.907222 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37ef1a3f-a5f2-4ec7-b996-2c8bacbac017" path="/var/lib/kubelet/pods/37ef1a3f-a5f2-4ec7-b996-2c8bacbac017/volumes" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.908321 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eddbb2b-7286-4117-97fd-f915638d19cd" path="/var/lib/kubelet/pods/4eddbb2b-7286-4117-97fd-f915638d19cd/volumes" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.909002 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="940db863-41cb-461b-ab9f-80663af52acf" path="/var/lib/kubelet/pods/940db863-41cb-461b-ab9f-80663af52acf/volumes" Dec 02 10:05:31 crc kubenswrapper[4685]: I1202 10:05:31.910053 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ea11cb0-f32a-4346-b195-ce6c7176553b" path="/var/lib/kubelet/pods/9ea11cb0-f32a-4346-b195-ce6c7176553b/volumes" Dec 02 10:05:32 crc kubenswrapper[4685]: I1202 10:05:32.254615 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" event={"ID":"267f61a3-d674-4855-9f76-b5c7edb14ed1","Type":"ContainerStarted","Data":"bb8cf8ee27c9ec73747c41d5edba468476fa8486836bb3d934ded6955bf4154d"} Dec 02 10:05:32 crc kubenswrapper[4685]: I1202 10:05:32.254660 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" event={"ID":"267f61a3-d674-4855-9f76-b5c7edb14ed1","Type":"ContainerStarted","Data":"c4640409be2a27fc516c6fbed63664b851c6bda615be9760d0e86b63826c8c82"} Dec 02 10:05:32 crc kubenswrapper[4685]: I1202 10:05:32.255592 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:32 crc kubenswrapper[4685]: I1202 10:05:32.257086 4685 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hxbft container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" start-of-body= Dec 02 10:05:32 crc kubenswrapper[4685]: I1202 10:05:32.257249 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" podUID="267f61a3-d674-4855-9f76-b5c7edb14ed1" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" Dec 02 10:05:32 crc kubenswrapper[4685]: I1202 10:05:32.280925 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" podStartSLOduration=1.280910199 podStartE2EDuration="1.280910199s" podCreationTimestamp="2025-12-02 10:05:31 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:05:32.278112746 +0000 UTC m=+224.649886910" watchObservedRunningTime="2025-12-02 10:05:32.280910199 +0000 UTC m=+224.652684353" Dec 02 10:05:33 crc kubenswrapper[4685]: I1202 10:05:33.273397 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hxbft" Dec 02 10:05:48 crc kubenswrapper[4685]: I1202 10:05:48.873414 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t5nfh"] Dec 02 10:05:48 crc kubenswrapper[4685]: I1202 10:05:48.874213 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" podUID="387d4a50-7633-485d-b8d3-ef2afc8c3c97" containerName="controller-manager" containerID="cri-o://cd97bd7d81d37aca5bac8b5ea221ecc10da1b3fdcae30899e413276946f56ef1" gracePeriod=30 Dec 02 10:05:48 crc kubenswrapper[4685]: I1202 10:05:48.994720 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg"] Dec 02 10:05:48 crc kubenswrapper[4685]: I1202 10:05:48.994952 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" podUID="589484c8-ce97-4e9e-b17a-58af833d9915" containerName="route-controller-manager" containerID="cri-o://9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503" gracePeriod=30 Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.345935 4685 generic.go:334] "Generic (PLEG): container finished" podID="387d4a50-7633-485d-b8d3-ef2afc8c3c97" containerID="cd97bd7d81d37aca5bac8b5ea221ecc10da1b3fdcae30899e413276946f56ef1" exitCode=0 Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.345972 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" event={"ID":"387d4a50-7633-485d-b8d3-ef2afc8c3c97","Type":"ContainerDied","Data":"cd97bd7d81d37aca5bac8b5ea221ecc10da1b3fdcae30899e413276946f56ef1"} Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.571370 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.598398 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-config\") pod \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.599243 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-config" (OuterVolumeSpecName: "config") pod "387d4a50-7633-485d-b8d3-ef2afc8c3c97" (UID: "387d4a50-7633-485d-b8d3-ef2afc8c3c97"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.599338 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/387d4a50-7633-485d-b8d3-ef2afc8c3c97-serving-cert\") pod \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.599363 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpd9r\" (UniqueName: \"kubernetes.io/projected/387d4a50-7633-485d-b8d3-ef2afc8c3c97-kube-api-access-wpd9r\") pod \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.599396 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-proxy-ca-bundles\") pod \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.599447 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-client-ca\") pod \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\" (UID: \"387d4a50-7633-485d-b8d3-ef2afc8c3c97\") " Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.599645 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.600111 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-client-ca" (OuterVolumeSpecName: "client-ca") pod "387d4a50-7633-485d-b8d3-ef2afc8c3c97" (UID: "387d4a50-7633-485d-b8d3-ef2afc8c3c97"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.600461 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "387d4a50-7633-485d-b8d3-ef2afc8c3c97" (UID: "387d4a50-7633-485d-b8d3-ef2afc8c3c97"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.605524 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/387d4a50-7633-485d-b8d3-ef2afc8c3c97-kube-api-access-wpd9r" (OuterVolumeSpecName: "kube-api-access-wpd9r") pod "387d4a50-7633-485d-b8d3-ef2afc8c3c97" (UID: "387d4a50-7633-485d-b8d3-ef2afc8c3c97"). InnerVolumeSpecName "kube-api-access-wpd9r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.605618 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/387d4a50-7633-485d-b8d3-ef2afc8c3c97-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "387d4a50-7633-485d-b8d3-ef2afc8c3c97" (UID: "387d4a50-7633-485d-b8d3-ef2afc8c3c97"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.700849 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/387d4a50-7633-485d-b8d3-ef2afc8c3c97-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.700889 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpd9r\" (UniqueName: \"kubernetes.io/projected/387d4a50-7633-485d-b8d3-ef2afc8c3c97-kube-api-access-wpd9r\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.700898 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.700907 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/387d4a50-7633-485d-b8d3-ef2afc8c3c97-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.810998 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.902936 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-client-ca\") pod \"589484c8-ce97-4e9e-b17a-58af833d9915\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.902995 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78qwp\" (UniqueName: \"kubernetes.io/projected/589484c8-ce97-4e9e-b17a-58af833d9915-kube-api-access-78qwp\") pod \"589484c8-ce97-4e9e-b17a-58af833d9915\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.903059 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-config\") pod \"589484c8-ce97-4e9e-b17a-58af833d9915\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.903119 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/589484c8-ce97-4e9e-b17a-58af833d9915-serving-cert\") pod \"589484c8-ce97-4e9e-b17a-58af833d9915\" (UID: \"589484c8-ce97-4e9e-b17a-58af833d9915\") " Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.903693 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-client-ca" (OuterVolumeSpecName: "client-ca") pod "589484c8-ce97-4e9e-b17a-58af833d9915" (UID: "589484c8-ce97-4e9e-b17a-58af833d9915"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.904393 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-config" (OuterVolumeSpecName: "config") pod "589484c8-ce97-4e9e-b17a-58af833d9915" (UID: "589484c8-ce97-4e9e-b17a-58af833d9915"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.907086 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/589484c8-ce97-4e9e-b17a-58af833d9915-kube-api-access-78qwp" (OuterVolumeSpecName: "kube-api-access-78qwp") pod "589484c8-ce97-4e9e-b17a-58af833d9915" (UID: "589484c8-ce97-4e9e-b17a-58af833d9915"). InnerVolumeSpecName "kube-api-access-78qwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:05:49 crc kubenswrapper[4685]: I1202 10:05:49.908953 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/589484c8-ce97-4e9e-b17a-58af833d9915-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "589484c8-ce97-4e9e-b17a-58af833d9915" (UID: "589484c8-ce97-4e9e-b17a-58af833d9915"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.004402 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.004431 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/589484c8-ce97-4e9e-b17a-58af833d9915-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.004440 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/589484c8-ce97-4e9e-b17a-58af833d9915-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.004449 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78qwp\" (UniqueName: \"kubernetes.io/projected/589484c8-ce97-4e9e-b17a-58af833d9915-kube-api-access-78qwp\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.320711 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-jr9hc"] Dec 02 10:05:50 crc kubenswrapper[4685]: E1202 10:05:50.321013 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="589484c8-ce97-4e9e-b17a-58af833d9915" containerName="route-controller-manager" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.321033 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="589484c8-ce97-4e9e-b17a-58af833d9915" containerName="route-controller-manager" Dec 02 10:05:50 crc kubenswrapper[4685]: E1202 10:05:50.321051 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="387d4a50-7633-485d-b8d3-ef2afc8c3c97" containerName="controller-manager" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.321058 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="387d4a50-7633-485d-b8d3-ef2afc8c3c97" containerName="controller-manager" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.321157 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="589484c8-ce97-4e9e-b17a-58af833d9915" containerName="route-controller-manager" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.321173 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="387d4a50-7633-485d-b8d3-ef2afc8c3c97" containerName="controller-manager" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.321523 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.323786 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw"] Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.324548 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.333377 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-jr9hc"] Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.336615 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw"] Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.352712 4685 generic.go:334] "Generic (PLEG): container finished" podID="589484c8-ce97-4e9e-b17a-58af833d9915" containerID="9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503" exitCode=0 Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.352845 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.352948 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" event={"ID":"589484c8-ce97-4e9e-b17a-58af833d9915","Type":"ContainerDied","Data":"9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503"} Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.353025 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg" event={"ID":"589484c8-ce97-4e9e-b17a-58af833d9915","Type":"ContainerDied","Data":"ac39432fa95fb9d430b4aee2ba9b6c982712bc4846af7dce17fde45c067fb942"} Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.353049 4685 scope.go:117] "RemoveContainer" containerID="9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.356154 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" event={"ID":"387d4a50-7633-485d-b8d3-ef2afc8c3c97","Type":"ContainerDied","Data":"5ed4a3dced196e61aeee9b314d7b3f420895ebeed3466af2e13ee3017cdfefca"} Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.356285 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t5nfh" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.383109 4685 scope.go:117] "RemoveContainer" containerID="9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503" Dec 02 10:05:50 crc kubenswrapper[4685]: E1202 10:05:50.385603 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503\": container with ID starting with 9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503 not found: ID does not exist" containerID="9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.385645 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503"} err="failed to get container status \"9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503\": rpc error: code = NotFound desc = could not find container \"9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503\": container with ID starting with 9f87259209a87128777120c667c1de8ad2bc2d6b346d4595bc5654674ef2e503 not found: ID does not exist" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.385685 4685 scope.go:117] "RemoveContainer" containerID="cd97bd7d81d37aca5bac8b5ea221ecc10da1b3fdcae30899e413276946f56ef1" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.410017 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-client-ca\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.410083 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-proxy-ca-bundles\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.410116 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4447c964-8787-42b1-9488-ce774e3bd8b8-serving-cert\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.410184 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-config\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.410216 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-client-ca\") pod 
\"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.410252 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5bdz\" (UniqueName: \"kubernetes.io/projected/94235936-2945-4acf-8409-a4c261fef3b8-kube-api-access-p5bdz\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.410292 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-config\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.410346 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94235936-2945-4acf-8409-a4c261fef3b8-serving-cert\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.410427 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn2tq\" (UniqueName: \"kubernetes.io/projected/4447c964-8787-42b1-9488-ce774e3bd8b8-kube-api-access-cn2tq\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.416491 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t5nfh"] Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.427099 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t5nfh"] Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.433979 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg"] Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.438916 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cz9bg"] Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.511466 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-config\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.511511 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-client-ca\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " 
pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.511533 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5bdz\" (UniqueName: \"kubernetes.io/projected/94235936-2945-4acf-8409-a4c261fef3b8-kube-api-access-p5bdz\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.511604 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-config\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.511637 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94235936-2945-4acf-8409-a4c261fef3b8-serving-cert\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.511675 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn2tq\" (UniqueName: \"kubernetes.io/projected/4447c964-8787-42b1-9488-ce774e3bd8b8-kube-api-access-cn2tq\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.511692 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-client-ca\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.511708 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-proxy-ca-bundles\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.511723 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4447c964-8787-42b1-9488-ce774e3bd8b8-serving-cert\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.512694 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-client-ca\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 
10:05:50.512892 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-client-ca\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.513038 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-config\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.513783 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-proxy-ca-bundles\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.517708 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94235936-2945-4acf-8409-a4c261fef3b8-serving-cert\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.522621 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4447c964-8787-42b1-9488-ce774e3bd8b8-serving-cert\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.523777 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-config\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.530152 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn2tq\" (UniqueName: \"kubernetes.io/projected/4447c964-8787-42b1-9488-ce774e3bd8b8-kube-api-access-cn2tq\") pod \"route-controller-manager-56556846d7-qhjlw\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.530183 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5bdz\" (UniqueName: \"kubernetes.io/projected/94235936-2945-4acf-8409-a4c261fef3b8-kube-api-access-p5bdz\") pod \"controller-manager-6987b5bb57-jr9hc\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.643784 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.656946 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.891552 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-jr9hc"] Dec 02 10:05:50 crc kubenswrapper[4685]: W1202 10:05:50.906396 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94235936_2945_4acf_8409_a4c261fef3b8.slice/crio-4bf74eae318fb41e64068d1102627f14f49293cfa5904776068cd1308caed715 WatchSource:0}: Error finding container 4bf74eae318fb41e64068d1102627f14f49293cfa5904776068cd1308caed715: Status 404 returned error can't find the container with id 4bf74eae318fb41e64068d1102627f14f49293cfa5904776068cd1308caed715 Dec 02 10:05:50 crc kubenswrapper[4685]: I1202 10:05:50.961452 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-jr9hc"] Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.037146 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw"] Dec 02 10:05:51 crc kubenswrapper[4685]: W1202 10:05:51.053472 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4447c964_8787_42b1_9488_ce774e3bd8b8.slice/crio-8278d83a9648ed59ab1a7f29243e4a557503568d9c210a9494435c3b02390cfc WatchSource:0}: Error finding container 8278d83a9648ed59ab1a7f29243e4a557503568d9c210a9494435c3b02390cfc: Status 404 returned error can't find the container with id 8278d83a9648ed59ab1a7f29243e4a557503568d9c210a9494435c3b02390cfc Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.063275 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw"] Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.365775 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" event={"ID":"94235936-2945-4acf-8409-a4c261fef3b8","Type":"ContainerStarted","Data":"f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b"} Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.366124 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.366137 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" event={"ID":"94235936-2945-4acf-8409-a4c261fef3b8","Type":"ContainerStarted","Data":"4bf74eae318fb41e64068d1102627f14f49293cfa5904776068cd1308caed715"} Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.365845 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" podUID="94235936-2945-4acf-8409-a4c261fef3b8" containerName="controller-manager" containerID="cri-o://f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b" gracePeriod=30 Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.368505 4685 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" event={"ID":"4447c964-8787-42b1-9488-ce774e3bd8b8","Type":"ContainerStarted","Data":"abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233"} Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.368575 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" event={"ID":"4447c964-8787-42b1-9488-ce774e3bd8b8","Type":"ContainerStarted","Data":"8278d83a9648ed59ab1a7f29243e4a557503568d9c210a9494435c3b02390cfc"} Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.368643 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" podUID="4447c964-8787-42b1-9488-ce774e3bd8b8" containerName="route-controller-manager" containerID="cri-o://abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233" gracePeriod=30 Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.373319 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.423321 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" podStartSLOduration=3.423293584 podStartE2EDuration="3.423293584s" podCreationTimestamp="2025-12-02 10:05:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:05:51.418421799 +0000 UTC m=+243.790195973" watchObservedRunningTime="2025-12-02 10:05:51.423293584 +0000 UTC m=+243.795067748" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.656891 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.673702 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" podStartSLOduration=2.6736862759999998 podStartE2EDuration="2.673686276s" podCreationTimestamp="2025-12-02 10:05:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:05:51.468021733 +0000 UTC m=+243.839795887" watchObservedRunningTime="2025-12-02 10:05:51.673686276 +0000 UTC m=+244.045460430" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.726107 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-client-ca\") pod \"94235936-2945-4acf-8409-a4c261fef3b8\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.726160 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5bdz\" (UniqueName: \"kubernetes.io/projected/94235936-2945-4acf-8409-a4c261fef3b8-kube-api-access-p5bdz\") pod \"94235936-2945-4acf-8409-a4c261fef3b8\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.726222 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-config\") pod \"94235936-2945-4acf-8409-a4c261fef3b8\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.726245 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-proxy-ca-bundles\") pod \"94235936-2945-4acf-8409-a4c261fef3b8\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.726268 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94235936-2945-4acf-8409-a4c261fef3b8-serving-cert\") pod \"94235936-2945-4acf-8409-a4c261fef3b8\" (UID: \"94235936-2945-4acf-8409-a4c261fef3b8\") " Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.726912 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-client-ca" (OuterVolumeSpecName: "client-ca") pod "94235936-2945-4acf-8409-a4c261fef3b8" (UID: "94235936-2945-4acf-8409-a4c261fef3b8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.726998 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-config" (OuterVolumeSpecName: "config") pod "94235936-2945-4acf-8409-a4c261fef3b8" (UID: "94235936-2945-4acf-8409-a4c261fef3b8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.727209 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "94235936-2945-4acf-8409-a4c261fef3b8" (UID: "94235936-2945-4acf-8409-a4c261fef3b8"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.732740 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94235936-2945-4acf-8409-a4c261fef3b8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "94235936-2945-4acf-8409-a4c261fef3b8" (UID: "94235936-2945-4acf-8409-a4c261fef3b8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.732752 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94235936-2945-4acf-8409-a4c261fef3b8-kube-api-access-p5bdz" (OuterVolumeSpecName: "kube-api-access-p5bdz") pod "94235936-2945-4acf-8409-a4c261fef3b8" (UID: "94235936-2945-4acf-8409-a4c261fef3b8"). InnerVolumeSpecName "kube-api-access-p5bdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.828019 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.828054 4685 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.828066 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94235936-2945-4acf-8409-a4c261fef3b8-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.828077 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94235936-2945-4acf-8409-a4c261fef3b8-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.828088 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5bdz\" (UniqueName: \"kubernetes.io/projected/94235936-2945-4acf-8409-a4c261fef3b8-kube-api-access-p5bdz\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.907324 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="387d4a50-7633-485d-b8d3-ef2afc8c3c97" path="/var/lib/kubelet/pods/387d4a50-7633-485d-b8d3-ef2afc8c3c97/volumes" Dec 02 10:05:51 crc kubenswrapper[4685]: I1202 10:05:51.907860 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="589484c8-ce97-4e9e-b17a-58af833d9915" path="/var/lib/kubelet/pods/589484c8-ce97-4e9e-b17a-58af833d9915/volumes" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.063218 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-56556846d7-qhjlw_4447c964-8787-42b1-9488-ce774e3bd8b8/route-controller-manager/0.log" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 
10:05:52.063289 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.130868 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4447c964-8787-42b1-9488-ce774e3bd8b8-serving-cert\") pod \"4447c964-8787-42b1-9488-ce774e3bd8b8\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.130957 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cn2tq\" (UniqueName: \"kubernetes.io/projected/4447c964-8787-42b1-9488-ce774e3bd8b8-kube-api-access-cn2tq\") pod \"4447c964-8787-42b1-9488-ce774e3bd8b8\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.130985 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-client-ca\") pod \"4447c964-8787-42b1-9488-ce774e3bd8b8\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.131006 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-config\") pod \"4447c964-8787-42b1-9488-ce774e3bd8b8\" (UID: \"4447c964-8787-42b1-9488-ce774e3bd8b8\") " Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.131807 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-client-ca" (OuterVolumeSpecName: "client-ca") pod "4447c964-8787-42b1-9488-ce774e3bd8b8" (UID: "4447c964-8787-42b1-9488-ce774e3bd8b8"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.131836 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-config" (OuterVolumeSpecName: "config") pod "4447c964-8787-42b1-9488-ce774e3bd8b8" (UID: "4447c964-8787-42b1-9488-ce774e3bd8b8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.134820 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4447c964-8787-42b1-9488-ce774e3bd8b8-kube-api-access-cn2tq" (OuterVolumeSpecName: "kube-api-access-cn2tq") pod "4447c964-8787-42b1-9488-ce774e3bd8b8" (UID: "4447c964-8787-42b1-9488-ce774e3bd8b8"). InnerVolumeSpecName "kube-api-access-cn2tq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.135338 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4447c964-8787-42b1-9488-ce774e3bd8b8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4447c964-8787-42b1-9488-ce774e3bd8b8" (UID: "4447c964-8787-42b1-9488-ce774e3bd8b8"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.232243 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cn2tq\" (UniqueName: \"kubernetes.io/projected/4447c964-8787-42b1-9488-ce774e3bd8b8-kube-api-access-cn2tq\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.232281 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.232295 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4447c964-8787-42b1-9488-ce774e3bd8b8-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.232305 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4447c964-8787-42b1-9488-ce774e3bd8b8-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.328330 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-78d5c55bd8-s4btz"] Dec 02 10:05:52 crc kubenswrapper[4685]: E1202 10:05:52.328546 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94235936-2945-4acf-8409-a4c261fef3b8" containerName="controller-manager" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.328573 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="94235936-2945-4acf-8409-a4c261fef3b8" containerName="controller-manager" Dec 02 10:05:52 crc kubenswrapper[4685]: E1202 10:05:52.328584 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4447c964-8787-42b1-9488-ce774e3bd8b8" containerName="route-controller-manager" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.328590 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4447c964-8787-42b1-9488-ce774e3bd8b8" containerName="route-controller-manager" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.328674 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="4447c964-8787-42b1-9488-ce774e3bd8b8" containerName="route-controller-manager" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.328685 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="94235936-2945-4acf-8409-a4c261fef3b8" containerName="controller-manager" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.329040 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.334342 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-78d5c55bd8-s4btz"] Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.375020 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-56556846d7-qhjlw_4447c964-8787-42b1-9488-ce774e3bd8b8/route-controller-manager/0.log" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.375078 4685 generic.go:334] "Generic (PLEG): container finished" podID="4447c964-8787-42b1-9488-ce774e3bd8b8" containerID="abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233" exitCode=255 Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.375149 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.375181 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" event={"ID":"4447c964-8787-42b1-9488-ce774e3bd8b8","Type":"ContainerDied","Data":"abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233"} Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.375220 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw" event={"ID":"4447c964-8787-42b1-9488-ce774e3bd8b8","Type":"ContainerDied","Data":"8278d83a9648ed59ab1a7f29243e4a557503568d9c210a9494435c3b02390cfc"} Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.375239 4685 scope.go:117] "RemoveContainer" containerID="abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.377155 4685 generic.go:334] "Generic (PLEG): container finished" podID="94235936-2945-4acf-8409-a4c261fef3b8" containerID="f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b" exitCode=0 Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.377203 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" event={"ID":"94235936-2945-4acf-8409-a4c261fef3b8","Type":"ContainerDied","Data":"f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b"} Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.377235 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" event={"ID":"94235936-2945-4acf-8409-a4c261fef3b8","Type":"ContainerDied","Data":"4bf74eae318fb41e64068d1102627f14f49293cfa5904776068cd1308caed715"} Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.377244 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6987b5bb57-jr9hc" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.398912 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-jr9hc"] Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.399500 4685 scope.go:117] "RemoveContainer" containerID="abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233" Dec 02 10:05:52 crc kubenswrapper[4685]: E1202 10:05:52.399926 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233\": container with ID starting with abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233 not found: ID does not exist" containerID="abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.399987 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233"} err="failed to get container status \"abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233\": rpc error: code = NotFound desc = could not find container \"abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233\": container with ID starting with abdda1a9c76e3677bf8cb9f359d96b38552c72d9e8d84a52e2bc103963992233 not found: ID does not exist" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.400009 4685 scope.go:117] "RemoveContainer" containerID="f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.402525 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6987b5bb57-jr9hc"] Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.416461 4685 scope.go:117] "RemoveContainer" containerID="f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.416595 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw"] Dec 02 10:05:52 crc kubenswrapper[4685]: E1202 10:05:52.416910 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b\": container with ID starting with f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b not found: ID does not exist" containerID="f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.416943 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b"} err="failed to get container status \"f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b\": rpc error: code = NotFound desc = could not find container \"f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b\": container with ID starting with f179b16252a16258661847cc35330b5bf8cdf5c1c0fc073e62e11394b3713d6b not found: ID does not exist" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.420207 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-56556846d7-qhjlw"] Dec 02 10:05:52 crc 
kubenswrapper[4685]: I1202 10:05:52.433858 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-client-ca\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.433941 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-proxy-ca-bundles\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.433968 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-config\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.433999 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hpc8\" (UniqueName: \"kubernetes.io/projected/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-kube-api-access-4hpc8\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.434034 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-serving-cert\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.534844 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-client-ca\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.534918 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-proxy-ca-bundles\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.536026 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-client-ca\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.536153 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-config\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.536251 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hpc8\" (UniqueName: \"kubernetes.io/projected/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-kube-api-access-4hpc8\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.536426 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-proxy-ca-bundles\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.536598 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-config\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.536677 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-serving-cert\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.540142 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-serving-cert\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.553257 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hpc8\" (UniqueName: \"kubernetes.io/projected/1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d-kube-api-access-4hpc8\") pod \"controller-manager-78d5c55bd8-s4btz\" (UID: \"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d\") " pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.650550 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:52 crc kubenswrapper[4685]: I1202 10:05:52.938521 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-78d5c55bd8-s4btz"] Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.328073 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh"] Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.328735 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: W1202 10:05:53.331131 4685 reflector.go:561] object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2": failed to list *v1.Secret: secrets "route-controller-manager-sa-dockercfg-h2zr2" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 02 10:05:53 crc kubenswrapper[4685]: E1202 10:05:53.331238 4685 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"route-controller-manager-sa-dockercfg-h2zr2\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"route-controller-manager-sa-dockercfg-h2zr2\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.331705 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.332448 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.332473 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.332512 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.333238 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.343945 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-serving-cert\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.343979 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-client-ca\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.344004 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scqww\" (UniqueName: \"kubernetes.io/projected/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-kube-api-access-scqww\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.344177 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-config\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.385283 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" event={"ID":"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d","Type":"ContainerStarted","Data":"c08c2af3f9831539923cdeab348d22267ea5aff4c62490802dc1a680e947c3e3"} Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.385343 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" event={"ID":"1cc7b7fb-0fa9-4afa-b83b-b3a5dac4f05d","Type":"ContainerStarted","Data":"9efeb1ef8a4cdda89a29c07b1cbedf11fa3256f7b87a93ac2951abaf0a43b02f"} Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.385606 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.392378 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh"] Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.442274 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.444677 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-serving-cert\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.444727 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-client-ca\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.444774 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scqww\" (UniqueName: \"kubernetes.io/projected/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-kube-api-access-scqww\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.445198 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-config\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.445445 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-client-ca\") 
pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.446732 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-config\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.457735 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-serving-cert\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.483755 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-78d5c55bd8-s4btz" podStartSLOduration=3.483733808 podStartE2EDuration="3.483733808s" podCreationTimestamp="2025-12-02 10:05:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:05:53.418535169 +0000 UTC m=+245.790309353" watchObservedRunningTime="2025-12-02 10:05:53.483733808 +0000 UTC m=+245.855507962" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.518602 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scqww\" (UniqueName: \"kubernetes.io/projected/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-kube-api-access-scqww\") pod \"route-controller-manager-5756f44d6-5qnzh\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.904942 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4447c964-8787-42b1-9488-ce774e3bd8b8" path="/var/lib/kubelet/pods/4447c964-8787-42b1-9488-ce774e3bd8b8/volumes" Dec 02 10:05:53 crc kubenswrapper[4685]: I1202 10:05:53.905583 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94235936-2945-4acf-8409-a4c261fef3b8" path="/var/lib/kubelet/pods/94235936-2945-4acf-8409-a4c261fef3b8/volumes" Dec 02 10:05:54 crc kubenswrapper[4685]: I1202 10:05:54.645491 4685 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" secret="" err="failed to sync secret cache: timed out waiting for the condition" Dec 02 10:05:54 crc kubenswrapper[4685]: I1202 10:05:54.645671 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:54 crc kubenswrapper[4685]: I1202 10:05:54.815917 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 02 10:05:54 crc kubenswrapper[4685]: I1202 10:05:54.850368 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh"] Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.395274 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" event={"ID":"b9b4353a-ddbf-450f-90b3-6f23fbf965dc","Type":"ContainerStarted","Data":"bbb63c9fc1f6e54d57623c4f8d238d47422cdb4d0d10fd631e39c87eb7759165"} Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.395609 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" event={"ID":"b9b4353a-ddbf-450f-90b3-6f23fbf965dc","Type":"ContainerStarted","Data":"e8edfe3b3e123a401f6d326af83c29ae91e95f1971bdd36e13572b17424d5296"} Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.409983 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" podStartSLOduration=4.409953842 podStartE2EDuration="4.409953842s" podCreationTimestamp="2025-12-02 10:05:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:05:55.40787022 +0000 UTC m=+247.779644384" watchObservedRunningTime="2025-12-02 10:05:55.409953842 +0000 UTC m=+247.781727996" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.699226 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-csktt"] Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.700252 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.704858 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.721237 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-csktt"] Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.872233 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrbtv\" (UniqueName: \"kubernetes.io/projected/5670a146-6fa3-4825-a054-77dc530bf1c4-kube-api-access-xrbtv\") pod \"redhat-operators-csktt\" (UID: \"5670a146-6fa3-4825-a054-77dc530bf1c4\") " pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.872282 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5670a146-6fa3-4825-a054-77dc530bf1c4-utilities\") pod \"redhat-operators-csktt\" (UID: \"5670a146-6fa3-4825-a054-77dc530bf1c4\") " pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.872312 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5670a146-6fa3-4825-a054-77dc530bf1c4-catalog-content\") pod \"redhat-operators-csktt\" (UID: \"5670a146-6fa3-4825-a054-77dc530bf1c4\") " pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.973038 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrbtv\" (UniqueName: \"kubernetes.io/projected/5670a146-6fa3-4825-a054-77dc530bf1c4-kube-api-access-xrbtv\") pod \"redhat-operators-csktt\" (UID: \"5670a146-6fa3-4825-a054-77dc530bf1c4\") " pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.973101 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5670a146-6fa3-4825-a054-77dc530bf1c4-utilities\") pod \"redhat-operators-csktt\" (UID: \"5670a146-6fa3-4825-a054-77dc530bf1c4\") " pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.973141 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5670a146-6fa3-4825-a054-77dc530bf1c4-catalog-content\") pod \"redhat-operators-csktt\" (UID: \"5670a146-6fa3-4825-a054-77dc530bf1c4\") " pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.973687 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5670a146-6fa3-4825-a054-77dc530bf1c4-catalog-content\") pod \"redhat-operators-csktt\" (UID: \"5670a146-6fa3-4825-a054-77dc530bf1c4\") " pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.973719 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5670a146-6fa3-4825-a054-77dc530bf1c4-utilities\") pod \"redhat-operators-csktt\" (UID: \"5670a146-6fa3-4825-a054-77dc530bf1c4\") " 
pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:55 crc kubenswrapper[4685]: I1202 10:05:55.992302 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrbtv\" (UniqueName: \"kubernetes.io/projected/5670a146-6fa3-4825-a054-77dc530bf1c4-kube-api-access-xrbtv\") pod \"redhat-operators-csktt\" (UID: \"5670a146-6fa3-4825-a054-77dc530bf1c4\") " pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:56 crc kubenswrapper[4685]: I1202 10:05:56.015962 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:05:56 crc kubenswrapper[4685]: I1202 10:05:56.228920 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-csktt"] Dec 02 10:05:56 crc kubenswrapper[4685]: W1202 10:05:56.233768 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5670a146_6fa3_4825_a054_77dc530bf1c4.slice/crio-89329adc0f1b673c394fd01609a5f6119acdcfc7f116efbc13f44d5ac15f74ec WatchSource:0}: Error finding container 89329adc0f1b673c394fd01609a5f6119acdcfc7f116efbc13f44d5ac15f74ec: Status 404 returned error can't find the container with id 89329adc0f1b673c394fd01609a5f6119acdcfc7f116efbc13f44d5ac15f74ec Dec 02 10:05:56 crc kubenswrapper[4685]: I1202 10:05:56.413694 4685 generic.go:334] "Generic (PLEG): container finished" podID="5670a146-6fa3-4825-a054-77dc530bf1c4" containerID="7a8aab952187db68035b12007e93cc1718566edf0564b89e8cb5b69b32775bc4" exitCode=0 Dec 02 10:05:56 crc kubenswrapper[4685]: I1202 10:05:56.413804 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-csktt" event={"ID":"5670a146-6fa3-4825-a054-77dc530bf1c4","Type":"ContainerDied","Data":"7a8aab952187db68035b12007e93cc1718566edf0564b89e8cb5b69b32775bc4"} Dec 02 10:05:56 crc kubenswrapper[4685]: I1202 10:05:56.414280 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-csktt" event={"ID":"5670a146-6fa3-4825-a054-77dc530bf1c4","Type":"ContainerStarted","Data":"89329adc0f1b673c394fd01609a5f6119acdcfc7f116efbc13f44d5ac15f74ec"} Dec 02 10:05:56 crc kubenswrapper[4685]: I1202 10:05:56.414631 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:56 crc kubenswrapper[4685]: I1202 10:05:56.428687 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.094278 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jkc9w"] Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.096503 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.099223 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.108785 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jkc9w"] Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.189838 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-catalog-content\") pod \"community-operators-jkc9w\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.190180 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-utilities\") pod \"community-operators-jkc9w\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.190346 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmnr6\" (UniqueName: \"kubernetes.io/projected/1f9b0e49-5d75-4e57-a136-4723a4a12c65-kube-api-access-zmnr6\") pod \"community-operators-jkc9w\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.291778 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmnr6\" (UniqueName: \"kubernetes.io/projected/1f9b0e49-5d75-4e57-a136-4723a4a12c65-kube-api-access-zmnr6\") pod \"community-operators-jkc9w\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.292053 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-catalog-content\") pod \"community-operators-jkc9w\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.292188 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-utilities\") pod \"community-operators-jkc9w\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.292587 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-catalog-content\") pod \"community-operators-jkc9w\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.292671 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-utilities\") pod \"community-operators-jkc9w\" (UID: 
\"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.330839 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmnr6\" (UniqueName: \"kubernetes.io/projected/1f9b0e49-5d75-4e57-a136-4723a4a12c65-kube-api-access-zmnr6\") pod \"community-operators-jkc9w\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.416835 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:05:57 crc kubenswrapper[4685]: I1202 10:05:57.859932 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jkc9w"] Dec 02 10:05:57 crc kubenswrapper[4685]: W1202 10:05:57.867671 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f9b0e49_5d75_4e57_a136_4723a4a12c65.slice/crio-3537e4af5d48985e2e826c24b2796444c1e6baa55d876133abb41b8bd2198c46 WatchSource:0}: Error finding container 3537e4af5d48985e2e826c24b2796444c1e6baa55d876133abb41b8bd2198c46: Status 404 returned error can't find the container with id 3537e4af5d48985e2e826c24b2796444c1e6baa55d876133abb41b8bd2198c46 Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.093908 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g2zlc"] Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.095144 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.098453 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.105412 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-utilities\") pod \"certified-operators-g2zlc\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.105474 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-catalog-content\") pod \"certified-operators-g2zlc\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.105517 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ldc9\" (UniqueName: \"kubernetes.io/projected/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-kube-api-access-2ldc9\") pod \"certified-operators-g2zlc\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.111920 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g2zlc"] Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.206114 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-utilities\") pod \"certified-operators-g2zlc\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.206163 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-catalog-content\") pod \"certified-operators-g2zlc\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.206184 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ldc9\" (UniqueName: \"kubernetes.io/projected/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-kube-api-access-2ldc9\") pod \"certified-operators-g2zlc\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.206890 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-catalog-content\") pod \"certified-operators-g2zlc\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.206891 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-utilities\") pod \"certified-operators-g2zlc\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.230938 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ldc9\" (UniqueName: \"kubernetes.io/projected/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-kube-api-access-2ldc9\") pod \"certified-operators-g2zlc\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.409732 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.426093 4685 generic.go:334] "Generic (PLEG): container finished" podID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerID="5bad5ce49a62f4a67bd8fa88ee176a23c04d6c2be891a9f6586b46a4301c3e13" exitCode=0 Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.426202 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkc9w" event={"ID":"1f9b0e49-5d75-4e57-a136-4723a4a12c65","Type":"ContainerDied","Data":"5bad5ce49a62f4a67bd8fa88ee176a23c04d6c2be891a9f6586b46a4301c3e13"} Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.426291 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkc9w" event={"ID":"1f9b0e49-5d75-4e57-a136-4723a4a12c65","Type":"ContainerStarted","Data":"3537e4af5d48985e2e826c24b2796444c1e6baa55d876133abb41b8bd2198c46"} Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.429004 4685 generic.go:334] "Generic (PLEG): container finished" podID="5670a146-6fa3-4825-a054-77dc530bf1c4" containerID="91781e6fcbddb19c66db8d43480dc557308d827248cd0606bbf51439f45737d3" exitCode=0 Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.429962 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-csktt" event={"ID":"5670a146-6fa3-4825-a054-77dc530bf1c4","Type":"ContainerDied","Data":"91781e6fcbddb19c66db8d43480dc557308d827248cd0606bbf51439f45737d3"} Dec 02 10:05:58 crc kubenswrapper[4685]: I1202 10:05:58.847280 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g2zlc"] Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.435659 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkc9w" event={"ID":"1f9b0e49-5d75-4e57-a136-4723a4a12c65","Type":"ContainerStarted","Data":"ece16396d36387bb05835f72d6e7f940f8e9e8da1113b6616d81997233d4b593"} Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.440179 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-csktt" event={"ID":"5670a146-6fa3-4825-a054-77dc530bf1c4","Type":"ContainerStarted","Data":"026146a92cfdf89b8eeca2f3d21f327e65de8150d109bff6224c3e3722d17ff8"} Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.441684 4685 generic.go:334] "Generic (PLEG): container finished" podID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerID="a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491" exitCode=0 Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.441725 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g2zlc" event={"ID":"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4","Type":"ContainerDied","Data":"a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491"} Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.441749 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g2zlc" event={"ID":"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4","Type":"ContainerStarted","Data":"433ed3a5fef13922376bdb192816a7249f2b15a6660ebe1512fb0abab2c9a018"} Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.495521 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-csktt" podStartSLOduration=1.909875946 podStartE2EDuration="4.49548878s" 
podCreationTimestamp="2025-12-02 10:05:55 +0000 UTC" firstStartedPulling="2025-12-02 10:05:56.416351036 +0000 UTC m=+248.788125190" lastFinishedPulling="2025-12-02 10:05:59.00196387 +0000 UTC m=+251.373738024" observedRunningTime="2025-12-02 10:05:59.486990346 +0000 UTC m=+251.858764510" watchObservedRunningTime="2025-12-02 10:05:59.49548878 +0000 UTC m=+251.867262934" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.503790 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m54gz"] Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.505707 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.507401 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.513794 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m54gz"] Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.521272 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bb5a867-9c67-423f-9780-97a59d6cc9da-catalog-content\") pod \"redhat-marketplace-m54gz\" (UID: \"3bb5a867-9c67-423f-9780-97a59d6cc9da\") " pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.521332 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgl2j\" (UniqueName: \"kubernetes.io/projected/3bb5a867-9c67-423f-9780-97a59d6cc9da-kube-api-access-mgl2j\") pod \"redhat-marketplace-m54gz\" (UID: \"3bb5a867-9c67-423f-9780-97a59d6cc9da\") " pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.521411 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bb5a867-9c67-423f-9780-97a59d6cc9da-utilities\") pod \"redhat-marketplace-m54gz\" (UID: \"3bb5a867-9c67-423f-9780-97a59d6cc9da\") " pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.622344 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bb5a867-9c67-423f-9780-97a59d6cc9da-catalog-content\") pod \"redhat-marketplace-m54gz\" (UID: \"3bb5a867-9c67-423f-9780-97a59d6cc9da\") " pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.622389 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgl2j\" (UniqueName: \"kubernetes.io/projected/3bb5a867-9c67-423f-9780-97a59d6cc9da-kube-api-access-mgl2j\") pod \"redhat-marketplace-m54gz\" (UID: \"3bb5a867-9c67-423f-9780-97a59d6cc9da\") " pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.622430 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bb5a867-9c67-423f-9780-97a59d6cc9da-utilities\") pod \"redhat-marketplace-m54gz\" (UID: \"3bb5a867-9c67-423f-9780-97a59d6cc9da\") " pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:05:59 crc 
kubenswrapper[4685]: I1202 10:05:59.622892 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bb5a867-9c67-423f-9780-97a59d6cc9da-utilities\") pod \"redhat-marketplace-m54gz\" (UID: \"3bb5a867-9c67-423f-9780-97a59d6cc9da\") " pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.623137 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bb5a867-9c67-423f-9780-97a59d6cc9da-catalog-content\") pod \"redhat-marketplace-m54gz\" (UID: \"3bb5a867-9c67-423f-9780-97a59d6cc9da\") " pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.650779 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgl2j\" (UniqueName: \"kubernetes.io/projected/3bb5a867-9c67-423f-9780-97a59d6cc9da-kube-api-access-mgl2j\") pod \"redhat-marketplace-m54gz\" (UID: \"3bb5a867-9c67-423f-9780-97a59d6cc9da\") " pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:05:59 crc kubenswrapper[4685]: E1202 10:05:59.758545 4685 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f9b0e49_5d75_4e57_a136_4723a4a12c65.slice/crio-ece16396d36387bb05835f72d6e7f940f8e9e8da1113b6616d81997233d4b593.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f9b0e49_5d75_4e57_a136_4723a4a12c65.slice/crio-conmon-ece16396d36387bb05835f72d6e7f940f8e9e8da1113b6616d81997233d4b593.scope\": RecentStats: unable to find data in memory cache]" Dec 02 10:05:59 crc kubenswrapper[4685]: I1202 10:05:59.821479 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:06:00 crc kubenswrapper[4685]: I1202 10:06:00.253497 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m54gz"] Dec 02 10:06:00 crc kubenswrapper[4685]: I1202 10:06:00.451519 4685 generic.go:334] "Generic (PLEG): container finished" podID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerID="ece16396d36387bb05835f72d6e7f940f8e9e8da1113b6616d81997233d4b593" exitCode=0 Dec 02 10:06:00 crc kubenswrapper[4685]: I1202 10:06:00.451680 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkc9w" event={"ID":"1f9b0e49-5d75-4e57-a136-4723a4a12c65","Type":"ContainerDied","Data":"ece16396d36387bb05835f72d6e7f940f8e9e8da1113b6616d81997233d4b593"} Dec 02 10:06:00 crc kubenswrapper[4685]: I1202 10:06:00.456700 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m54gz" event={"ID":"3bb5a867-9c67-423f-9780-97a59d6cc9da","Type":"ContainerStarted","Data":"1f8ba169260f6cb0de4cb430e04cdad99cc5efc78e797e841b234c367734cd88"} Dec 02 10:06:01 crc kubenswrapper[4685]: I1202 10:06:01.462524 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkc9w" event={"ID":"1f9b0e49-5d75-4e57-a136-4723a4a12c65","Type":"ContainerStarted","Data":"960e56704d1ab02966cf512a6266c0cbc03a297a55984c3dcf5647083cac26b7"} Dec 02 10:06:01 crc kubenswrapper[4685]: I1202 10:06:01.463833 4685 generic.go:334] "Generic (PLEG): container finished" podID="3bb5a867-9c67-423f-9780-97a59d6cc9da" containerID="2827c502dbeba777b25f310f63b010f88e2e30cfb0f8fb7004cdbdeebd7e36b4" exitCode=0 Dec 02 10:06:01 crc kubenswrapper[4685]: I1202 10:06:01.463883 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m54gz" event={"ID":"3bb5a867-9c67-423f-9780-97a59d6cc9da","Type":"ContainerDied","Data":"2827c502dbeba777b25f310f63b010f88e2e30cfb0f8fb7004cdbdeebd7e36b4"} Dec 02 10:06:01 crc kubenswrapper[4685]: I1202 10:06:01.465974 4685 generic.go:334] "Generic (PLEG): container finished" podID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerID="6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741" exitCode=0 Dec 02 10:06:01 crc kubenswrapper[4685]: I1202 10:06:01.466007 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g2zlc" event={"ID":"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4","Type":"ContainerDied","Data":"6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741"} Dec 02 10:06:01 crc kubenswrapper[4685]: I1202 10:06:01.489416 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jkc9w" podStartSLOduration=1.927416493 podStartE2EDuration="4.489398215s" podCreationTimestamp="2025-12-02 10:05:57 +0000 UTC" firstStartedPulling="2025-12-02 10:05:58.427172785 +0000 UTC m=+250.798946939" lastFinishedPulling="2025-12-02 10:06:00.989154507 +0000 UTC m=+253.360928661" observedRunningTime="2025-12-02 10:06:01.486634173 +0000 UTC m=+253.858408327" watchObservedRunningTime="2025-12-02 10:06:01.489398215 +0000 UTC m=+253.861172369" Dec 02 10:06:02 crc kubenswrapper[4685]: I1202 10:06:02.473366 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g2zlc" 
event={"ID":"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4","Type":"ContainerStarted","Data":"c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12"} Dec 02 10:06:04 crc kubenswrapper[4685]: I1202 10:06:04.482416 4685 generic.go:334] "Generic (PLEG): container finished" podID="3bb5a867-9c67-423f-9780-97a59d6cc9da" containerID="08490d1ff0e7607b55900d05f5132e3d1bea1fbf58a3b1d6a518ab25d142afb0" exitCode=0 Dec 02 10:06:04 crc kubenswrapper[4685]: I1202 10:06:04.482690 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m54gz" event={"ID":"3bb5a867-9c67-423f-9780-97a59d6cc9da","Type":"ContainerDied","Data":"08490d1ff0e7607b55900d05f5132e3d1bea1fbf58a3b1d6a518ab25d142afb0"} Dec 02 10:06:04 crc kubenswrapper[4685]: I1202 10:06:04.503943 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g2zlc" podStartSLOduration=3.998643603 podStartE2EDuration="6.503926419s" podCreationTimestamp="2025-12-02 10:05:58 +0000 UTC" firstStartedPulling="2025-12-02 10:05:59.444398311 +0000 UTC m=+251.816172455" lastFinishedPulling="2025-12-02 10:06:01.949681117 +0000 UTC m=+254.321455271" observedRunningTime="2025-12-02 10:06:02.512233018 +0000 UTC m=+254.884007192" watchObservedRunningTime="2025-12-02 10:06:04.503926419 +0000 UTC m=+256.875700573" Dec 02 10:06:06 crc kubenswrapper[4685]: I1202 10:06:06.017097 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:06:06 crc kubenswrapper[4685]: I1202 10:06:06.017430 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:06:06 crc kubenswrapper[4685]: I1202 10:06:06.056922 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:06:06 crc kubenswrapper[4685]: I1202 10:06:06.496746 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m54gz" event={"ID":"3bb5a867-9c67-423f-9780-97a59d6cc9da","Type":"ContainerStarted","Data":"1bfb658c05e0ffc7b896035f053b8752b2eb08178565fe5b446b0f571744b3ea"} Dec 02 10:06:06 crc kubenswrapper[4685]: I1202 10:06:06.537414 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-csktt" Dec 02 10:06:06 crc kubenswrapper[4685]: I1202 10:06:06.540156 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m54gz" podStartSLOduration=3.346561044 podStartE2EDuration="7.540137843s" podCreationTimestamp="2025-12-02 10:05:59 +0000 UTC" firstStartedPulling="2025-12-02 10:06:01.465219627 +0000 UTC m=+253.836993771" lastFinishedPulling="2025-12-02 10:06:05.658796406 +0000 UTC m=+258.030570570" observedRunningTime="2025-12-02 10:06:06.535056401 +0000 UTC m=+258.906830555" watchObservedRunningTime="2025-12-02 10:06:06.540137843 +0000 UTC m=+258.911911997" Dec 02 10:06:07 crc kubenswrapper[4685]: I1202 10:06:07.417792 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:06:07 crc kubenswrapper[4685]: I1202 10:06:07.417873 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:06:07 crc kubenswrapper[4685]: I1202 10:06:07.470221 4685 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:06:07 crc kubenswrapper[4685]: I1202 10:06:07.546153 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:06:08 crc kubenswrapper[4685]: I1202 10:06:08.410892 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:06:08 crc kubenswrapper[4685]: I1202 10:06:08.411149 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:06:08 crc kubenswrapper[4685]: I1202 10:06:08.446006 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:06:08 crc kubenswrapper[4685]: I1202 10:06:08.545249 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.745625 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-wk72r"] Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.747137 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.765514 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-wk72r"] Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.821896 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.821958 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.850631 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0563a299-0cad-45d1-af03-057ec098f6f1-bound-sa-token\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.850680 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqgrl\" (UniqueName: \"kubernetes.io/projected/0563a299-0cad-45d1-af03-057ec098f6f1-kube-api-access-rqgrl\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.850718 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.850753 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/0563a299-0cad-45d1-af03-057ec098f6f1-trusted-ca\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.850776 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0563a299-0cad-45d1-af03-057ec098f6f1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.850803 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0563a299-0cad-45d1-af03-057ec098f6f1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.850840 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0563a299-0cad-45d1-af03-057ec098f6f1-registry-certificates\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.850869 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0563a299-0cad-45d1-af03-057ec098f6f1-registry-tls\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.865243 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.895708 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.951816 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0563a299-0cad-45d1-af03-057ec098f6f1-bound-sa-token\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.951879 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqgrl\" (UniqueName: \"kubernetes.io/projected/0563a299-0cad-45d1-af03-057ec098f6f1-kube-api-access-rqgrl\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.951917 4685 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0563a299-0cad-45d1-af03-057ec098f6f1-trusted-ca\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.951936 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0563a299-0cad-45d1-af03-057ec098f6f1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.951976 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0563a299-0cad-45d1-af03-057ec098f6f1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.952014 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0563a299-0cad-45d1-af03-057ec098f6f1-registry-certificates\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.952045 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0563a299-0cad-45d1-af03-057ec098f6f1-registry-tls\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.954444 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0563a299-0cad-45d1-af03-057ec098f6f1-trusted-ca\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.955158 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0563a299-0cad-45d1-af03-057ec098f6f1-ca-trust-extracted\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.955912 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0563a299-0cad-45d1-af03-057ec098f6f1-registry-certificates\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.959136 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0563a299-0cad-45d1-af03-057ec098f6f1-installation-pull-secrets\") pod \"image-registry-66df7c8f76-wk72r\" (UID: 
\"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.968791 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0563a299-0cad-45d1-af03-057ec098f6f1-registry-tls\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.972505 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqgrl\" (UniqueName: \"kubernetes.io/projected/0563a299-0cad-45d1-af03-057ec098f6f1-kube-api-access-rqgrl\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:09 crc kubenswrapper[4685]: I1202 10:06:09.985457 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0563a299-0cad-45d1-af03-057ec098f6f1-bound-sa-token\") pod \"image-registry-66df7c8f76-wk72r\" (UID: \"0563a299-0cad-45d1-af03-057ec098f6f1\") " pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:10 crc kubenswrapper[4685]: I1202 10:06:10.064847 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:10 crc kubenswrapper[4685]: I1202 10:06:10.452481 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-wk72r"] Dec 02 10:06:10 crc kubenswrapper[4685]: I1202 10:06:10.518252 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" event={"ID":"0563a299-0cad-45d1-af03-057ec098f6f1","Type":"ContainerStarted","Data":"18ac527d5ca0fcd513430cbb223b6421232484533e3edb012e93c5d2d8263ce8"} Dec 02 10:06:10 crc kubenswrapper[4685]: I1202 10:06:10.566817 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m54gz" Dec 02 10:06:14 crc kubenswrapper[4685]: I1202 10:06:14.539398 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" event={"ID":"0563a299-0cad-45d1-af03-057ec098f6f1","Type":"ContainerStarted","Data":"30146b5854537f9a01d408697c40af9f53514abc820f7dd8ff92531545779777"} Dec 02 10:06:14 crc kubenswrapper[4685]: I1202 10:06:14.540530 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:14 crc kubenswrapper[4685]: I1202 10:06:14.563452 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" podStartSLOduration=5.563428833 podStartE2EDuration="5.563428833s" podCreationTimestamp="2025-12-02 10:06:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:06:14.560076418 +0000 UTC m=+266.931850562" watchObservedRunningTime="2025-12-02 10:06:14.563428833 +0000 UTC m=+266.935203007" Dec 02 10:06:28 crc kubenswrapper[4685]: I1202 10:06:28.852166 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh"] Dec 02 
10:06:28 crc kubenswrapper[4685]: I1202 10:06:28.853000 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" podUID="b9b4353a-ddbf-450f-90b3-6f23fbf965dc" containerName="route-controller-manager" containerID="cri-o://bbb63c9fc1f6e54d57623c4f8d238d47422cdb4d0d10fd631e39c87eb7759165" gracePeriod=30 Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.619942 4685 generic.go:334] "Generic (PLEG): container finished" podID="b9b4353a-ddbf-450f-90b3-6f23fbf965dc" containerID="bbb63c9fc1f6e54d57623c4f8d238d47422cdb4d0d10fd631e39c87eb7759165" exitCode=0 Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.620133 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" event={"ID":"b9b4353a-ddbf-450f-90b3-6f23fbf965dc","Type":"ContainerDied","Data":"bbb63c9fc1f6e54d57623c4f8d238d47422cdb4d0d10fd631e39c87eb7759165"} Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.829265 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.836120 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-config\") pod \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.836179 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-client-ca\") pod \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.836214 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-serving-cert\") pod \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.836250 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scqww\" (UniqueName: \"kubernetes.io/projected/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-kube-api-access-scqww\") pod \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\" (UID: \"b9b4353a-ddbf-450f-90b3-6f23fbf965dc\") " Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.837578 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-client-ca" (OuterVolumeSpecName: "client-ca") pod "b9b4353a-ddbf-450f-90b3-6f23fbf965dc" (UID: "b9b4353a-ddbf-450f-90b3-6f23fbf965dc"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.838145 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-config" (OuterVolumeSpecName: "config") pod "b9b4353a-ddbf-450f-90b3-6f23fbf965dc" (UID: "b9b4353a-ddbf-450f-90b3-6f23fbf965dc"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.845184 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b9b4353a-ddbf-450f-90b3-6f23fbf965dc" (UID: "b9b4353a-ddbf-450f-90b3-6f23fbf965dc"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.850144 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-kube-api-access-scqww" (OuterVolumeSpecName: "kube-api-access-scqww") pod "b9b4353a-ddbf-450f-90b3-6f23fbf965dc" (UID: "b9b4353a-ddbf-450f-90b3-6f23fbf965dc"). InnerVolumeSpecName "kube-api-access-scqww". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.937702 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.938446 4685 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-client-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.938583 4685 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:29 crc kubenswrapper[4685]: I1202 10:06:29.938665 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scqww\" (UniqueName: \"kubernetes.io/projected/b9b4353a-ddbf-450f-90b3-6f23fbf965dc-kube-api-access-scqww\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.069407 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-wk72r" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.116185 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fcrk7"] Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.351701 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp"] Dec 02 10:06:30 crc kubenswrapper[4685]: E1202 10:06:30.352498 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9b4353a-ddbf-450f-90b3-6f23fbf965dc" containerName="route-controller-manager" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.352644 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9b4353a-ddbf-450f-90b3-6f23fbf965dc" containerName="route-controller-manager" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.352823 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9b4353a-ddbf-450f-90b3-6f23fbf965dc" containerName="route-controller-manager" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.353322 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.364827 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp"] Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.444206 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e1e9ade-5264-4950-9d23-7cb448a0bf30-config\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.444263 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7e1e9ade-5264-4950-9d23-7cb448a0bf30-client-ca\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.444307 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e1e9ade-5264-4950-9d23-7cb448a0bf30-serving-cert\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.444454 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzhck\" (UniqueName: \"kubernetes.io/projected/7e1e9ade-5264-4950-9d23-7cb448a0bf30-kube-api-access-rzhck\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.545665 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzhck\" (UniqueName: \"kubernetes.io/projected/7e1e9ade-5264-4950-9d23-7cb448a0bf30-kube-api-access-rzhck\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.545897 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e1e9ade-5264-4950-9d23-7cb448a0bf30-config\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.547020 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7e1e9ade-5264-4950-9d23-7cb448a0bf30-client-ca\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.547264 4685 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e1e9ade-5264-4950-9d23-7cb448a0bf30-serving-cert\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.547956 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7e1e9ade-5264-4950-9d23-7cb448a0bf30-client-ca\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.549790 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e1e9ade-5264-4950-9d23-7cb448a0bf30-config\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.554646 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e1e9ade-5264-4950-9d23-7cb448a0bf30-serving-cert\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.565068 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzhck\" (UniqueName: \"kubernetes.io/projected/7e1e9ade-5264-4950-9d23-7cb448a0bf30-kube-api-access-rzhck\") pod \"route-controller-manager-5547cc666d-qzwjp\" (UID: \"7e1e9ade-5264-4950-9d23-7cb448a0bf30\") " pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.627985 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" event={"ID":"b9b4353a-ddbf-450f-90b3-6f23fbf965dc","Type":"ContainerDied","Data":"e8edfe3b3e123a401f6d326af83c29ae91e95f1971bdd36e13572b17424d5296"} Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.628034 4685 scope.go:117] "RemoveContainer" containerID="bbb63c9fc1f6e54d57623c4f8d238d47422cdb4d0d10fd631e39c87eb7759165" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.628427 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh" Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.651594 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh"] Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.655304 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5756f44d6-5qnzh"] Dec 02 10:06:30 crc kubenswrapper[4685]: I1202 10:06:30.669280 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:31 crc kubenswrapper[4685]: I1202 10:06:31.051612 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp"] Dec 02 10:06:31 crc kubenswrapper[4685]: I1202 10:06:31.635490 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" event={"ID":"7e1e9ade-5264-4950-9d23-7cb448a0bf30","Type":"ContainerStarted","Data":"f8c0d9430d5e05c25911278c2c0a3f0ad098acf181ac5cdd2a937ef2d09ff965"} Dec 02 10:06:31 crc kubenswrapper[4685]: I1202 10:06:31.635822 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:31 crc kubenswrapper[4685]: I1202 10:06:31.635837 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" event={"ID":"7e1e9ade-5264-4950-9d23-7cb448a0bf30","Type":"ContainerStarted","Data":"f212c2000b35976f6323058793a2c390ccab77863f4387213922959030239798"} Dec 02 10:06:31 crc kubenswrapper[4685]: I1202 10:06:31.654657 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" podStartSLOduration=3.6546340600000002 podStartE2EDuration="3.65463406s" podCreationTimestamp="2025-12-02 10:06:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:06:31.651626505 +0000 UTC m=+284.023400669" watchObservedRunningTime="2025-12-02 10:06:31.65463406 +0000 UTC m=+284.026408214" Dec 02 10:06:31 crc kubenswrapper[4685]: I1202 10:06:31.880143 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5547cc666d-qzwjp" Dec 02 10:06:31 crc kubenswrapper[4685]: I1202 10:06:31.908370 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9b4353a-ddbf-450f-90b3-6f23fbf965dc" path="/var/lib/kubelet/pods/b9b4353a-ddbf-450f-90b3-6f23fbf965dc/volumes" Dec 02 10:06:47 crc kubenswrapper[4685]: I1202 10:06:47.775136 4685 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.162823 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" podUID="8a4fad5a-444e-4161-b3df-a473b62dca2d" containerName="registry" containerID="cri-o://d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915" gracePeriod=30 Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.530930 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.694092 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-certificates\") pod \"8a4fad5a-444e-4161-b3df-a473b62dca2d\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.694152 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ks7mv\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-kube-api-access-ks7mv\") pod \"8a4fad5a-444e-4161-b3df-a473b62dca2d\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.694274 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-tls\") pod \"8a4fad5a-444e-4161-b3df-a473b62dca2d\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.694679 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8a4fad5a-444e-4161-b3df-a473b62dca2d-installation-pull-secrets\") pod \"8a4fad5a-444e-4161-b3df-a473b62dca2d\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.694731 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8a4fad5a-444e-4161-b3df-a473b62dca2d" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.694797 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8a4fad5a-444e-4161-b3df-a473b62dca2d\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.694824 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-bound-sa-token\") pod \"8a4fad5a-444e-4161-b3df-a473b62dca2d\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.694844 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-trusted-ca\") pod \"8a4fad5a-444e-4161-b3df-a473b62dca2d\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.694872 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8a4fad5a-444e-4161-b3df-a473b62dca2d-ca-trust-extracted\") pod \"8a4fad5a-444e-4161-b3df-a473b62dca2d\" (UID: \"8a4fad5a-444e-4161-b3df-a473b62dca2d\") " Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.695104 4685 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.695497 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8a4fad5a-444e-4161-b3df-a473b62dca2d" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.700847 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8a4fad5a-444e-4161-b3df-a473b62dca2d" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.700848 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a4fad5a-444e-4161-b3df-a473b62dca2d-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8a4fad5a-444e-4161-b3df-a473b62dca2d" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.702034 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-kube-api-access-ks7mv" (OuterVolumeSpecName: "kube-api-access-ks7mv") pod "8a4fad5a-444e-4161-b3df-a473b62dca2d" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d"). InnerVolumeSpecName "kube-api-access-ks7mv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.702253 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8a4fad5a-444e-4161-b3df-a473b62dca2d" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.703186 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "8a4fad5a-444e-4161-b3df-a473b62dca2d" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.710840 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a4fad5a-444e-4161-b3df-a473b62dca2d-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8a4fad5a-444e-4161-b3df-a473b62dca2d" (UID: "8a4fad5a-444e-4161-b3df-a473b62dca2d"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.770920 4685 generic.go:334] "Generic (PLEG): container finished" podID="8a4fad5a-444e-4161-b3df-a473b62dca2d" containerID="d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915" exitCode=0 Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.770966 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.770974 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" event={"ID":"8a4fad5a-444e-4161-b3df-a473b62dca2d","Type":"ContainerDied","Data":"d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915"} Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.770999 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-fcrk7" event={"ID":"8a4fad5a-444e-4161-b3df-a473b62dca2d","Type":"ContainerDied","Data":"e9225d0fda879278b5d33be4b47547e4458c195c2b69ba4585d0a4cfaa9c6a94"} Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.771015 4685 scope.go:117] "RemoveContainer" containerID="d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.801227 4685 scope.go:117] "RemoveContainer" containerID="d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.801939 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ks7mv\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-kube-api-access-ks7mv\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.801970 4685 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.801980 4685 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8a4fad5a-444e-4161-b3df-a473b62dca2d-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.801988 4685 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8a4fad5a-444e-4161-b3df-a473b62dca2d-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.801998 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8a4fad5a-444e-4161-b3df-a473b62dca2d-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.802006 4685 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8a4fad5a-444e-4161-b3df-a473b62dca2d-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 02 10:06:55 crc kubenswrapper[4685]: E1202 10:06:55.802082 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915\": container with ID starting with d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915 not found: ID does not exist" containerID="d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.802116 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915"} err="failed to get container status \"d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915\": rpc 
error: code = NotFound desc = could not find container \"d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915\": container with ID starting with d9f5fe5dea799998294435f9bb9e626db3ec08ed6d157e968d64197232531915 not found: ID does not exist" Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.805693 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fcrk7"] Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.811374 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-fcrk7"] Dec 02 10:06:55 crc kubenswrapper[4685]: I1202 10:06:55.907243 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a4fad5a-444e-4161-b3df-a473b62dca2d" path="/var/lib/kubelet/pods/8a4fad5a-444e-4161-b3df-a473b62dca2d/volumes" Dec 02 10:07:12 crc kubenswrapper[4685]: I1202 10:07:12.148087 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:07:12 crc kubenswrapper[4685]: I1202 10:07:12.149165 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:07:42 crc kubenswrapper[4685]: I1202 10:07:42.147262 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:07:42 crc kubenswrapper[4685]: I1202 10:07:42.147806 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:08:12 crc kubenswrapper[4685]: I1202 10:08:12.147552 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:08:12 crc kubenswrapper[4685]: I1202 10:08:12.148178 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:08:12 crc kubenswrapper[4685]: I1202 10:08:12.148242 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:08:12 crc kubenswrapper[4685]: I1202 10:08:12.148860 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"0071a28d3e21c488be2b1d07993acd33dbd116d8424d85af9c0eda9381b4c6d4"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:08:12 crc kubenswrapper[4685]: I1202 10:08:12.148953 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://0071a28d3e21c488be2b1d07993acd33dbd116d8424d85af9c0eda9381b4c6d4" gracePeriod=600 Dec 02 10:08:13 crc kubenswrapper[4685]: I1202 10:08:13.174388 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="0071a28d3e21c488be2b1d07993acd33dbd116d8424d85af9c0eda9381b4c6d4" exitCode=0 Dec 02 10:08:13 crc kubenswrapper[4685]: I1202 10:08:13.174438 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"0071a28d3e21c488be2b1d07993acd33dbd116d8424d85af9c0eda9381b4c6d4"} Dec 02 10:08:13 crc kubenswrapper[4685]: I1202 10:08:13.174970 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"756a98cb3b9410eaff964c85a1d8208136ab965899a092f6f28f719bd0b0236c"} Dec 02 10:08:13 crc kubenswrapper[4685]: I1202 10:08:13.174991 4685 scope.go:117] "RemoveContainer" containerID="08e04cf99fa3ee1491747d78ebb85fb820355da3a86817cc7bdd5250562fc29e" Dec 02 10:10:12 crc kubenswrapper[4685]: I1202 10:10:12.148133 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:10:12 crc kubenswrapper[4685]: I1202 10:10:12.148687 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:10:42 crc kubenswrapper[4685]: I1202 10:10:42.147840 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:10:42 crc kubenswrapper[4685]: I1202 10:10:42.148785 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:11:12 crc kubenswrapper[4685]: I1202 10:11:12.147660 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:11:12 crc kubenswrapper[4685]: I1202 10:11:12.148463 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:11:12 crc kubenswrapper[4685]: I1202 10:11:12.148537 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:11:12 crc kubenswrapper[4685]: I1202 10:11:12.149421 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"756a98cb3b9410eaff964c85a1d8208136ab965899a092f6f28f719bd0b0236c"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:11:12 crc kubenswrapper[4685]: I1202 10:11:12.149559 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://756a98cb3b9410eaff964c85a1d8208136ab965899a092f6f28f719bd0b0236c" gracePeriod=600 Dec 02 10:11:13 crc kubenswrapper[4685]: I1202 10:11:13.205287 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="756a98cb3b9410eaff964c85a1d8208136ab965899a092f6f28f719bd0b0236c" exitCode=0 Dec 02 10:11:13 crc kubenswrapper[4685]: I1202 10:11:13.205299 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"756a98cb3b9410eaff964c85a1d8208136ab965899a092f6f28f719bd0b0236c"} Dec 02 10:11:13 crc kubenswrapper[4685]: I1202 10:11:13.205638 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"3a4bcb8208e258f079f3fb221ee480a662e75e3d7ac311aacc2db0117f88c73a"} Dec 02 10:11:13 crc kubenswrapper[4685]: I1202 10:11:13.205663 4685 scope.go:117] "RemoveContainer" containerID="0071a28d3e21c488be2b1d07993acd33dbd116d8424d85af9c0eda9381b4c6d4" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.333752 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-2dnjg"] Dec 02 10:12:15 crc kubenswrapper[4685]: E1202 10:12:15.334486 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a4fad5a-444e-4161-b3df-a473b62dca2d" containerName="registry" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.334501 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a4fad5a-444e-4161-b3df-a473b62dca2d" containerName="registry" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.334651 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a4fad5a-444e-4161-b3df-a473b62dca2d" containerName="registry" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.335082 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-2dnjg" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.337757 4685 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-w64gg" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.338715 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.338803 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.344962 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-2dnjg"] Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.351698 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-lt7gc"] Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.352431 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-lt7gc" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.354137 4685 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-gnr2f" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.374578 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-6gfx7"] Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.375180 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-6gfx7" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.394696 4685 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-5dkh5" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.396006 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-lt7gc"] Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.424871 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-6gfx7"] Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.526301 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl96q\" (UniqueName: \"kubernetes.io/projected/379893b2-eb01-49f9-b70c-9e459c6c6ed0-kube-api-access-cl96q\") pod \"cert-manager-webhook-5655c58dd6-6gfx7\" (UID: \"379893b2-eb01-49f9-b70c-9e459c6c6ed0\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-6gfx7" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.526351 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g6wt\" (UniqueName: \"kubernetes.io/projected/f2c74be0-fa96-4153-8159-bd67e214d601-kube-api-access-4g6wt\") pod \"cert-manager-5b446d88c5-lt7gc\" (UID: \"f2c74be0-fa96-4153-8159-bd67e214d601\") " pod="cert-manager/cert-manager-5b446d88c5-lt7gc" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.526369 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz8jd\" (UniqueName: \"kubernetes.io/projected/9e280929-1daa-4ff4-b690-1192d259178d-kube-api-access-nz8jd\") pod \"cert-manager-cainjector-7f985d654d-2dnjg\" (UID: \"9e280929-1daa-4ff4-b690-1192d259178d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-2dnjg" Dec 02 10:12:15 
crc kubenswrapper[4685]: I1202 10:12:15.627771 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl96q\" (UniqueName: \"kubernetes.io/projected/379893b2-eb01-49f9-b70c-9e459c6c6ed0-kube-api-access-cl96q\") pod \"cert-manager-webhook-5655c58dd6-6gfx7\" (UID: \"379893b2-eb01-49f9-b70c-9e459c6c6ed0\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-6gfx7" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.627816 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g6wt\" (UniqueName: \"kubernetes.io/projected/f2c74be0-fa96-4153-8159-bd67e214d601-kube-api-access-4g6wt\") pod \"cert-manager-5b446d88c5-lt7gc\" (UID: \"f2c74be0-fa96-4153-8159-bd67e214d601\") " pod="cert-manager/cert-manager-5b446d88c5-lt7gc" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.627831 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz8jd\" (UniqueName: \"kubernetes.io/projected/9e280929-1daa-4ff4-b690-1192d259178d-kube-api-access-nz8jd\") pod \"cert-manager-cainjector-7f985d654d-2dnjg\" (UID: \"9e280929-1daa-4ff4-b690-1192d259178d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-2dnjg" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.647596 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl96q\" (UniqueName: \"kubernetes.io/projected/379893b2-eb01-49f9-b70c-9e459c6c6ed0-kube-api-access-cl96q\") pod \"cert-manager-webhook-5655c58dd6-6gfx7\" (UID: \"379893b2-eb01-49f9-b70c-9e459c6c6ed0\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-6gfx7" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.649259 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g6wt\" (UniqueName: \"kubernetes.io/projected/f2c74be0-fa96-4153-8159-bd67e214d601-kube-api-access-4g6wt\") pod \"cert-manager-5b446d88c5-lt7gc\" (UID: \"f2c74be0-fa96-4153-8159-bd67e214d601\") " pod="cert-manager/cert-manager-5b446d88c5-lt7gc" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.651489 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz8jd\" (UniqueName: \"kubernetes.io/projected/9e280929-1daa-4ff4-b690-1192d259178d-kube-api-access-nz8jd\") pod \"cert-manager-cainjector-7f985d654d-2dnjg\" (UID: \"9e280929-1daa-4ff4-b690-1192d259178d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-2dnjg" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.664429 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-lt7gc" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.686662 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-6gfx7" Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.866607 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-lt7gc"] Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.879796 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.916568 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-6gfx7"] Dec 02 10:12:15 crc kubenswrapper[4685]: I1202 10:12:15.950439 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-2dnjg" Dec 02 10:12:16 crc kubenswrapper[4685]: I1202 10:12:16.141128 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-2dnjg"] Dec 02 10:12:16 crc kubenswrapper[4685]: W1202 10:12:16.145913 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e280929_1daa_4ff4_b690_1192d259178d.slice/crio-20119ddc3e3438470b8501663b5ddf7e3bfe7300247b0db11bb384c1885710e1 WatchSource:0}: Error finding container 20119ddc3e3438470b8501663b5ddf7e3bfe7300247b0db11bb384c1885710e1: Status 404 returned error can't find the container with id 20119ddc3e3438470b8501663b5ddf7e3bfe7300247b0db11bb384c1885710e1 Dec 02 10:12:16 crc kubenswrapper[4685]: I1202 10:12:16.691530 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-2dnjg" event={"ID":"9e280929-1daa-4ff4-b690-1192d259178d","Type":"ContainerStarted","Data":"20119ddc3e3438470b8501663b5ddf7e3bfe7300247b0db11bb384c1885710e1"} Dec 02 10:12:16 crc kubenswrapper[4685]: I1202 10:12:16.692893 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-6gfx7" event={"ID":"379893b2-eb01-49f9-b70c-9e459c6c6ed0","Type":"ContainerStarted","Data":"dc3d7b5957e969ddb5a502aa630985512047598f5266d8f72c78f5445ac5ee5c"} Dec 02 10:12:16 crc kubenswrapper[4685]: I1202 10:12:16.693909 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-lt7gc" event={"ID":"f2c74be0-fa96-4153-8159-bd67e214d601","Type":"ContainerStarted","Data":"d73de9a4819505c8d721ce6d620516fc0b7a090a81d13327fbab3855859f9b42"} Dec 02 10:12:19 crc kubenswrapper[4685]: I1202 10:12:19.710149 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-6gfx7" event={"ID":"379893b2-eb01-49f9-b70c-9e459c6c6ed0","Type":"ContainerStarted","Data":"5c1a12d2af246bd2052f0787f0caba57ea7cf1c48b3656f13b2a1cf1bedeb621"} Dec 02 10:12:19 crc kubenswrapper[4685]: I1202 10:12:19.710478 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-6gfx7" Dec 02 10:12:19 crc kubenswrapper[4685]: I1202 10:12:19.712314 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-lt7gc" event={"ID":"f2c74be0-fa96-4153-8159-bd67e214d601","Type":"ContainerStarted","Data":"07e189b3510ae9d0b3c1d00bea36f2746b0dc32485c7cc5d57e65b381a1cfc78"} Dec 02 10:12:19 crc kubenswrapper[4685]: I1202 10:12:19.713842 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-2dnjg" event={"ID":"9e280929-1daa-4ff4-b690-1192d259178d","Type":"ContainerStarted","Data":"1a78bbc590a37ec5aa632016d64254e1ef9b4ba3df4e7caf1fbf37f7eeb27a4d"} Dec 02 10:12:19 crc kubenswrapper[4685]: I1202 10:12:19.728345 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-6gfx7" podStartSLOduration=1.445215817 podStartE2EDuration="4.728317181s" podCreationTimestamp="2025-12-02 10:12:15 +0000 UTC" firstStartedPulling="2025-12-02 10:12:15.925341245 +0000 UTC m=+628.297115419" lastFinishedPulling="2025-12-02 10:12:19.208442629 +0000 UTC m=+631.580216783" observedRunningTime="2025-12-02 10:12:19.724269598 +0000 UTC m=+632.096043752" watchObservedRunningTime="2025-12-02 10:12:19.728317181 
+0000 UTC m=+632.100091355" Dec 02 10:12:19 crc kubenswrapper[4685]: I1202 10:12:19.740396 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-lt7gc" podStartSLOduration=1.4145287579999999 podStartE2EDuration="4.740372303s" podCreationTimestamp="2025-12-02 10:12:15 +0000 UTC" firstStartedPulling="2025-12-02 10:12:15.879486438 +0000 UTC m=+628.251260592" lastFinishedPulling="2025-12-02 10:12:19.205329983 +0000 UTC m=+631.577104137" observedRunningTime="2025-12-02 10:12:19.739019087 +0000 UTC m=+632.110793261" watchObservedRunningTime="2025-12-02 10:12:19.740372303 +0000 UTC m=+632.112146477" Dec 02 10:12:19 crc kubenswrapper[4685]: I1202 10:12:19.756051 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-2dnjg" podStartSLOduration=1.718729612 podStartE2EDuration="4.756026106s" podCreationTimestamp="2025-12-02 10:12:15 +0000 UTC" firstStartedPulling="2025-12-02 10:12:16.148383186 +0000 UTC m=+628.520157340" lastFinishedPulling="2025-12-02 10:12:19.18567968 +0000 UTC m=+631.557453834" observedRunningTime="2025-12-02 10:12:19.750306398 +0000 UTC m=+632.122080552" watchObservedRunningTime="2025-12-02 10:12:19.756026106 +0000 UTC m=+632.127800280" Dec 02 10:12:25 crc kubenswrapper[4685]: I1202 10:12:25.694594 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-6gfx7" Dec 02 10:12:25 crc kubenswrapper[4685]: I1202 10:12:25.695155 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7bvm"] Dec 02 10:12:25 crc kubenswrapper[4685]: I1202 10:12:25.695800 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovn-controller" containerID="cri-o://5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e" gracePeriod=30 Dec 02 10:12:25 crc kubenswrapper[4685]: I1202 10:12:25.696291 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="northd" containerID="cri-o://db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8" gracePeriod=30 Dec 02 10:12:25 crc kubenswrapper[4685]: I1202 10:12:25.696470 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="sbdb" containerID="cri-o://8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f" gracePeriod=30 Dec 02 10:12:25 crc kubenswrapper[4685]: I1202 10:12:25.696520 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="nbdb" containerID="cri-o://1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972" gracePeriod=30 Dec 02 10:12:25 crc kubenswrapper[4685]: I1202 10:12:25.696617 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="kube-rbac-proxy-node" containerID="cri-o://acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2" gracePeriod=30 Dec 02 10:12:25 crc kubenswrapper[4685]: I1202 10:12:25.696676 4685 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc" gracePeriod=30 Dec 02 10:12:25 crc kubenswrapper[4685]: I1202 10:12:25.696741 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovn-acl-logging" containerID="cri-o://66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d" gracePeriod=30 Dec 02 10:12:25 crc kubenswrapper[4685]: I1202 10:12:25.747338 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovnkube-controller" containerID="cri-o://8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c" gracePeriod=30 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.051939 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7bvm_1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe/ovn-acl-logging/0.log" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.052648 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7bvm_1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe/ovn-controller/0.log" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.053282 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.119827 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5l8lt"] Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.120081 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="kubecfg-setup" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120101 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="kubecfg-setup" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.120111 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovn-controller" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120118 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovn-controller" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.120129 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovn-acl-logging" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120138 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovn-acl-logging" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.120148 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="sbdb" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120154 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="sbdb" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.120164 4685 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="northd" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120171 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="northd" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.120180 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovnkube-controller" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120188 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovnkube-controller" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.120197 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="nbdb" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120205 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="nbdb" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.120217 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120224 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.120237 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="kube-rbac-proxy-node" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120244 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="kube-rbac-proxy-node" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120360 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovn-acl-logging" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120371 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovn-controller" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120379 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="nbdb" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120389 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="northd" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120401 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="sbdb" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120411 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="kube-rbac-proxy-node" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120421 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="kube-rbac-proxy-ovn-metrics" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.120431 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerName="ovnkube-controller" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.122594 4685 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.158051 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-ovn-kubernetes\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.158107 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-kubelet\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.158158 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-systemd\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.158179 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.158226 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.158338 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-etc-openvswitch\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.158365 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.158642 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxjtj\" (UniqueName: \"kubernetes.io/projected/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-kube-api-access-hxjtj\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159101 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-log-socket\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159200 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-log-socket" (OuterVolumeSpecName: "log-socket") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159295 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-var-lib-openvswitch\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159362 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-config\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159400 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovn-node-metrics-cert\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159414 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159422 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-ovn\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159448 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159495 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-slash\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159531 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-node-log\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159573 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-bin\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159595 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-slash" (OuterVolumeSpecName: "host-slash") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159629 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-netns\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159656 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-script-lib\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159633 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-node-log" (OuterVolumeSpecName: "node-log") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159692 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159650 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159691 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-var-lib-cni-networks-ovn-kubernetes\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159727 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-netd\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159679 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159725 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159741 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-systemd-units\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159818 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-env-overrides\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159761 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159776 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159856 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-openvswitch\") pod \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\" (UID: \"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe\") " Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159889 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.159971 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-node-log\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160012 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-var-lib-openvswitch\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160048 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/703127c8-f8d9-4be4-a81e-0ab75293993d-env-overrides\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160081 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-slash\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160109 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/703127c8-f8d9-4be4-a81e-0ab75293993d-ovnkube-script-lib\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160138 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsrst\" (UniqueName: \"kubernetes.io/projected/703127c8-f8d9-4be4-a81e-0ab75293993d-kube-api-access-qsrst\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160164 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-env-overrides" (OuterVolumeSpecName: "env-overrides") pod 
"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160174 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160172 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/703127c8-f8d9-4be4-a81e-0ab75293993d-ovnkube-config\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160307 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-run-ovn-kubernetes\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160338 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-kubelet\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160367 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/703127c8-f8d9-4be4-a81e-0ab75293993d-ovn-node-metrics-cert\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160418 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-run-openvswitch\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160444 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-run-systemd\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160516 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-run-netns\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160616 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-systemd-units\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160638 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-cni-netd\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160664 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-run-ovn\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160714 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-log-socket\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160801 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-cni-bin\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160881 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.160930 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-etc-openvswitch\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161027 4685 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161048 4685 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161065 4685 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-netns\") on 
node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161081 4685 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161098 4685 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161114 4685 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161129 4685 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161145 4685 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161162 4685 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161178 4685 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161198 4685 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161213 4685 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-log-socket\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161228 4685 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161243 4685 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161258 4685 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161274 4685 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-host-slash\") on node \"crc\" DevicePath 
\"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.161288 4685 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-node-log\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.163462 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-kube-api-access-hxjtj" (OuterVolumeSpecName: "kube-api-access-hxjtj") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "kube-api-access-hxjtj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.163688 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.170335 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" (UID: "1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262009 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-cni-bin\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262074 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262103 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-etc-openvswitch\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262126 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-node-log\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262146 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-var-lib-openvswitch\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262151 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-cni-bin\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262184 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-etc-openvswitch\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262159 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262168 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/703127c8-f8d9-4be4-a81e-0ab75293993d-env-overrides\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262226 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-var-lib-openvswitch\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262248 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-slash\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262268 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/703127c8-f8d9-4be4-a81e-0ab75293993d-ovnkube-script-lib\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262190 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-node-log\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262293 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsrst\" (UniqueName: \"kubernetes.io/projected/703127c8-f8d9-4be4-a81e-0ab75293993d-kube-api-access-qsrst\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc 
kubenswrapper[4685]: I1202 10:12:26.262309 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-slash\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262323 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/703127c8-f8d9-4be4-a81e-0ab75293993d-ovnkube-config\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262344 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-run-ovn-kubernetes\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262373 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-kubelet\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262396 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/703127c8-f8d9-4be4-a81e-0ab75293993d-ovn-node-metrics-cert\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262430 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-run-ovn-kubernetes\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262444 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-run-openvswitch\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262470 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-run-systemd\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262474 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-kubelet\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262507 4685 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-run-netns\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262536 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-systemd-units\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262576 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-cni-netd\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262598 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-run-netns\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262601 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-run-ovn\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262627 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-run-ovn\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262632 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-run-openvswitch\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262507 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-run-systemd\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262666 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-host-cni-netd\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262676 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-log-socket\") pod 
\"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262697 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-systemd-units\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262746 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/703127c8-f8d9-4be4-a81e-0ab75293993d-log-socket\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262790 4685 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262831 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxjtj\" (UniqueName: \"kubernetes.io/projected/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-kube-api-access-hxjtj\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262846 4685 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.262852 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/703127c8-f8d9-4be4-a81e-0ab75293993d-env-overrides\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.263001 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/703127c8-f8d9-4be4-a81e-0ab75293993d-ovnkube-script-lib\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.263256 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/703127c8-f8d9-4be4-a81e-0ab75293993d-ovnkube-config\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.266003 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/703127c8-f8d9-4be4-a81e-0ab75293993d-ovn-node-metrics-cert\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.277714 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsrst\" (UniqueName: \"kubernetes.io/projected/703127c8-f8d9-4be4-a81e-0ab75293993d-kube-api-access-qsrst\") pod \"ovnkube-node-5l8lt\" (UID: \"703127c8-f8d9-4be4-a81e-0ab75293993d\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.436631 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.751487 4685 generic.go:334] "Generic (PLEG): container finished" podID="703127c8-f8d9-4be4-a81e-0ab75293993d" containerID="b085be775ed5bdb63cd9a4d4ebbd48d4d36e6b4ad097f3899173189ff9d0af33" exitCode=0 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.751614 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" event={"ID":"703127c8-f8d9-4be4-a81e-0ab75293993d","Type":"ContainerDied","Data":"b085be775ed5bdb63cd9a4d4ebbd48d4d36e6b4ad097f3899173189ff9d0af33"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.751959 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" event={"ID":"703127c8-f8d9-4be4-a81e-0ab75293993d","Type":"ContainerStarted","Data":"f2f879d6c4cedc22446d79cd114d8ef43f0b1773c174d0780a471090cb469262"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.753986 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mttcn_a09db319-6f1b-4944-8097-6df042ad0869/kube-multus/0.log" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.754046 4685 generic.go:334] "Generic (PLEG): container finished" podID="a09db319-6f1b-4944-8097-6df042ad0869" containerID="852b092b8ee9f25cbb6757b72a83bae9e22c0d29403063ed8f427bee7488600b" exitCode=2 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.754156 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mttcn" event={"ID":"a09db319-6f1b-4944-8097-6df042ad0869","Type":"ContainerDied","Data":"852b092b8ee9f25cbb6757b72a83bae9e22c0d29403063ed8f427bee7488600b"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.754922 4685 scope.go:117] "RemoveContainer" containerID="852b092b8ee9f25cbb6757b72a83bae9e22c0d29403063ed8f427bee7488600b" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.761717 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7bvm_1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe/ovn-acl-logging/0.log" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762220 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p7bvm_1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe/ovn-controller/0.log" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762842 4685 generic.go:334] "Generic (PLEG): container finished" podID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerID="8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c" exitCode=0 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762866 4685 generic.go:334] "Generic (PLEG): container finished" podID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerID="8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f" exitCode=0 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762875 4685 generic.go:334] "Generic (PLEG): container finished" podID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerID="1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972" exitCode=0 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762883 4685 generic.go:334] "Generic (PLEG): container finished" podID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" 
containerID="db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8" exitCode=0 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762890 4685 generic.go:334] "Generic (PLEG): container finished" podID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerID="f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc" exitCode=0 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762897 4685 generic.go:334] "Generic (PLEG): container finished" podID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerID="acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2" exitCode=0 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762904 4685 generic.go:334] "Generic (PLEG): container finished" podID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerID="66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d" exitCode=143 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762911 4685 generic.go:334] "Generic (PLEG): container finished" podID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" containerID="5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e" exitCode=143 Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762931 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerDied","Data":"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762956 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerDied","Data":"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762967 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerDied","Data":"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762977 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerDied","Data":"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762987 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerDied","Data":"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.762996 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerDied","Data":"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763006 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763016 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e"} Dec 02 10:12:26 crc 
kubenswrapper[4685]: I1202 10:12:26.763021 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763028 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerDied","Data":"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763035 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763041 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763054 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763060 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763065 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763071 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763076 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763081 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763086 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763093 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerDied","Data":"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763101 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763107 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763112 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763117 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763122 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763127 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763132 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763137 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763142 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763149 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" event={"ID":"1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe","Type":"ContainerDied","Data":"53ea63d6f1b2731641a6920b622a764c9921dcf8e0f8549ea1518d90b2e39f71"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763157 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763163 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763167 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763172 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763177 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763182 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763186 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763191 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763196 4685 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57"} Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763208 4685 scope.go:117] "RemoveContainer" containerID="8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.763346 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p7bvm" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.800978 4685 scope.go:117] "RemoveContainer" containerID="8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.828953 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7bvm"] Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.835641 4685 scope.go:117] "RemoveContainer" containerID="1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.836189 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p7bvm"] Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.863593 4685 scope.go:117] "RemoveContainer" containerID="db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.880324 4685 scope.go:117] "RemoveContainer" containerID="f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.900415 4685 scope.go:117] "RemoveContainer" containerID="acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.912942 4685 scope.go:117] "RemoveContainer" containerID="66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.927433 4685 scope.go:117] "RemoveContainer" containerID="5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.945850 4685 scope.go:117] "RemoveContainer" containerID="e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.972525 4685 scope.go:117] "RemoveContainer" containerID="8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.972905 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": container with ID starting with 8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c not found: ID does not 
exist" containerID="8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.972938 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c"} err="failed to get container status \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": rpc error: code = NotFound desc = could not find container \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": container with ID starting with 8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.972959 4685 scope.go:117] "RemoveContainer" containerID="8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.973354 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": container with ID starting with 8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f not found: ID does not exist" containerID="8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.973385 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f"} err="failed to get container status \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": rpc error: code = NotFound desc = could not find container \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": container with ID starting with 8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.973404 4685 scope.go:117] "RemoveContainer" containerID="1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.973639 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": container with ID starting with 1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972 not found: ID does not exist" containerID="1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.973658 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972"} err="failed to get container status \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": rpc error: code = NotFound desc = could not find container \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": container with ID starting with 1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.973702 4685 scope.go:117] "RemoveContainer" containerID="db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.973930 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": container with ID starting with db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8 not found: ID does not exist" containerID="db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.973957 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8"} err="failed to get container status \"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": rpc error: code = NotFound desc = could not find container \"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": container with ID starting with db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.973976 4685 scope.go:117] "RemoveContainer" containerID="f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.974207 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": container with ID starting with f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc not found: ID does not exist" containerID="f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.974232 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc"} err="failed to get container status \"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": rpc error: code = NotFound desc = could not find container \"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": container with ID starting with f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.974249 4685 scope.go:117] "RemoveContainer" containerID="acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.974460 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": container with ID starting with acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2 not found: ID does not exist" containerID="acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.974485 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2"} err="failed to get container status \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": rpc error: code = NotFound desc = could not find container \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": container with ID starting with acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.974509 4685 scope.go:117] "RemoveContainer" containerID="66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d" Dec 02 10:12:26 crc 
kubenswrapper[4685]: E1202 10:12:26.975031 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d\": container with ID starting with 66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d not found: ID does not exist" containerID="66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.975052 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d"} err="failed to get container status \"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d\": rpc error: code = NotFound desc = could not find container \"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d\": container with ID starting with 66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.975066 4685 scope.go:117] "RemoveContainer" containerID="5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.975279 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e\": container with ID starting with 5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e not found: ID does not exist" containerID="5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.975308 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e"} err="failed to get container status \"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e\": rpc error: code = NotFound desc = could not find container \"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e\": container with ID starting with 5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.975325 4685 scope.go:117] "RemoveContainer" containerID="e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57" Dec 02 10:12:26 crc kubenswrapper[4685]: E1202 10:12:26.975527 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\": container with ID starting with e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57 not found: ID does not exist" containerID="e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.975544 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57"} err="failed to get container status \"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\": rpc error: code = NotFound desc = could not find container \"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\": container with ID starting with e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: 
I1202 10:12:26.975555 4685 scope.go:117] "RemoveContainer" containerID="8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.975817 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c"} err="failed to get container status \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": rpc error: code = NotFound desc = could not find container \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": container with ID starting with 8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.975862 4685 scope.go:117] "RemoveContainer" containerID="8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.976097 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f"} err="failed to get container status \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": rpc error: code = NotFound desc = could not find container \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": container with ID starting with 8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.976114 4685 scope.go:117] "RemoveContainer" containerID="1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.976334 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972"} err="failed to get container status \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": rpc error: code = NotFound desc = could not find container \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": container with ID starting with 1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.976372 4685 scope.go:117] "RemoveContainer" containerID="db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.976698 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8"} err="failed to get container status \"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": rpc error: code = NotFound desc = could not find container \"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": container with ID starting with db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.976733 4685 scope.go:117] "RemoveContainer" containerID="f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.977038 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc"} err="failed to get container status 
\"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": rpc error: code = NotFound desc = could not find container \"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": container with ID starting with f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.977065 4685 scope.go:117] "RemoveContainer" containerID="acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.977995 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2"} err="failed to get container status \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": rpc error: code = NotFound desc = could not find container \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": container with ID starting with acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.978021 4685 scope.go:117] "RemoveContainer" containerID="66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.978366 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d"} err="failed to get container status \"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d\": rpc error: code = NotFound desc = could not find container \"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d\": container with ID starting with 66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.978391 4685 scope.go:117] "RemoveContainer" containerID="5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.978637 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e"} err="failed to get container status \"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e\": rpc error: code = NotFound desc = could not find container \"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e\": container with ID starting with 5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.978657 4685 scope.go:117] "RemoveContainer" containerID="e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.978989 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57"} err="failed to get container status \"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\": rpc error: code = NotFound desc = could not find container \"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\": container with ID starting with e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.979014 4685 scope.go:117] "RemoveContainer" 
containerID="8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.979347 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c"} err="failed to get container status \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": rpc error: code = NotFound desc = could not find container \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": container with ID starting with 8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.979386 4685 scope.go:117] "RemoveContainer" containerID="8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.979693 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f"} err="failed to get container status \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": rpc error: code = NotFound desc = could not find container \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": container with ID starting with 8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.979713 4685 scope.go:117] "RemoveContainer" containerID="1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.979956 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972"} err="failed to get container status \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": rpc error: code = NotFound desc = could not find container \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": container with ID starting with 1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.979975 4685 scope.go:117] "RemoveContainer" containerID="db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.980215 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8"} err="failed to get container status \"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": rpc error: code = NotFound desc = could not find container \"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": container with ID starting with db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.980233 4685 scope.go:117] "RemoveContainer" containerID="f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.980544 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc"} err="failed to get container status \"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": rpc error: code = NotFound desc = could not find 
container \"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": container with ID starting with f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.980614 4685 scope.go:117] "RemoveContainer" containerID="acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.980960 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2"} err="failed to get container status \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": rpc error: code = NotFound desc = could not find container \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": container with ID starting with acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.980977 4685 scope.go:117] "RemoveContainer" containerID="66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.981170 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d"} err="failed to get container status \"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d\": rpc error: code = NotFound desc = could not find container \"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d\": container with ID starting with 66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.981190 4685 scope.go:117] "RemoveContainer" containerID="5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.981405 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e"} err="failed to get container status \"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e\": rpc error: code = NotFound desc = could not find container \"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e\": container with ID starting with 5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.981433 4685 scope.go:117] "RemoveContainer" containerID="e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.981743 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57"} err="failed to get container status \"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\": rpc error: code = NotFound desc = could not find container \"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\": container with ID starting with e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.981762 4685 scope.go:117] "RemoveContainer" containerID="8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.981985 4685 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c"} err="failed to get container status \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": rpc error: code = NotFound desc = could not find container \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": container with ID starting with 8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.982006 4685 scope.go:117] "RemoveContainer" containerID="8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.982231 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f"} err="failed to get container status \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": rpc error: code = NotFound desc = could not find container \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": container with ID starting with 8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.982257 4685 scope.go:117] "RemoveContainer" containerID="1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.982462 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972"} err="failed to get container status \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": rpc error: code = NotFound desc = could not find container \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": container with ID starting with 1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.982495 4685 scope.go:117] "RemoveContainer" containerID="db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.982794 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8"} err="failed to get container status \"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": rpc error: code = NotFound desc = could not find container \"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": container with ID starting with db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.982826 4685 scope.go:117] "RemoveContainer" containerID="f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.983005 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc"} err="failed to get container status \"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": rpc error: code = NotFound desc = could not find container \"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": container with ID starting with 
f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.983027 4685 scope.go:117] "RemoveContainer" containerID="acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.983357 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2"} err="failed to get container status \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": rpc error: code = NotFound desc = could not find container \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": container with ID starting with acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.983378 4685 scope.go:117] "RemoveContainer" containerID="66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.983617 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d"} err="failed to get container status \"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d\": rpc error: code = NotFound desc = could not find container \"66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d\": container with ID starting with 66a564db20b859a92e6381a930f49d47494f7415621c002b27f5a6bfdba7d10d not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.983637 4685 scope.go:117] "RemoveContainer" containerID="5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.983881 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e"} err="failed to get container status \"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e\": rpc error: code = NotFound desc = could not find container \"5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e\": container with ID starting with 5693ca5174c5e8886ad67555c7b7d57ee81972666b38492e91efacef61a43b3e not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.983902 4685 scope.go:117] "RemoveContainer" containerID="e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.984099 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57"} err="failed to get container status \"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\": rpc error: code = NotFound desc = could not find container \"e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57\": container with ID starting with e9b4a51be80d9cc873915f74dc516cc3e965ace498dd568c5893f6c6e5e68d57 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.984142 4685 scope.go:117] "RemoveContainer" containerID="8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.984523 4685 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c"} err="failed to get container status \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": rpc error: code = NotFound desc = could not find container \"8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c\": container with ID starting with 8ed452d72ea3d7dbb64a83d0eb3defd2d278edc21900ba403a231043008c3a3c not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.984575 4685 scope.go:117] "RemoveContainer" containerID="8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.984842 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f"} err="failed to get container status \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": rpc error: code = NotFound desc = could not find container \"8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f\": container with ID starting with 8647ae0681a4e61bc71ae77c3af0ee038a56537bfce412899f89c7eeb85a1a4f not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.984883 4685 scope.go:117] "RemoveContainer" containerID="1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.985143 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972"} err="failed to get container status \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": rpc error: code = NotFound desc = could not find container \"1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972\": container with ID starting with 1404ec6e92ffdd0c3e7c98bbb88d53a9bd1dc67412ee19a6b427a795bfeec972 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.985164 4685 scope.go:117] "RemoveContainer" containerID="db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.985407 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8"} err="failed to get container status \"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": rpc error: code = NotFound desc = could not find container \"db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8\": container with ID starting with db4f9612d416ca2920cdcddc65a018c3e25e9101f3e06949af8b6c523bfaf5c8 not found: ID does not exist" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.985429 4685 scope.go:117] "RemoveContainer" containerID="f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.985839 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc"} err="failed to get container status \"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": rpc error: code = NotFound desc = could not find container \"f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc\": container with ID starting with f7476ae1581058c57f652eb36f576c615fa0a933ac01bc4756409a731577a9bc not found: ID does not exist" Dec 
02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.985858 4685 scope.go:117] "RemoveContainer" containerID="acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2" Dec 02 10:12:26 crc kubenswrapper[4685]: I1202 10:12:26.986041 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2"} err="failed to get container status \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": rpc error: code = NotFound desc = could not find container \"acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2\": container with ID starting with acaba4d90f375c85ee5b80f22a48b308975df901f4b51aa2f64ac7f59da7b6e2 not found: ID does not exist" Dec 02 10:12:27 crc kubenswrapper[4685]: I1202 10:12:27.772440 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" event={"ID":"703127c8-f8d9-4be4-a81e-0ab75293993d","Type":"ContainerStarted","Data":"234a0129370aa638e2de0a77c6d386dc92216abe240862db1d6753aadafe5f78"} Dec 02 10:12:27 crc kubenswrapper[4685]: I1202 10:12:27.772762 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" event={"ID":"703127c8-f8d9-4be4-a81e-0ab75293993d","Type":"ContainerStarted","Data":"e3a5a0b5d2bd3e04e7e54b1fdda8bf295d73efab94f59bfe4746eb9cc496c5f3"} Dec 02 10:12:27 crc kubenswrapper[4685]: I1202 10:12:27.772789 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" event={"ID":"703127c8-f8d9-4be4-a81e-0ab75293993d","Type":"ContainerStarted","Data":"52f26bce7d02dd52e4dc3e10296199bbb2e01419814eed659aed08d48b4714ae"} Dec 02 10:12:27 crc kubenswrapper[4685]: I1202 10:12:27.772804 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" event={"ID":"703127c8-f8d9-4be4-a81e-0ab75293993d","Type":"ContainerStarted","Data":"d0bb5806cb1926ddddd5af0f9ec98ba440e50eeeb588f964e02237bae0e30a8b"} Dec 02 10:12:27 crc kubenswrapper[4685]: I1202 10:12:27.772824 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" event={"ID":"703127c8-f8d9-4be4-a81e-0ab75293993d","Type":"ContainerStarted","Data":"0a1c2142ed3317aee37ad9eb63aa44bfd35375b08df79c6972dd89634ee4f77c"} Dec 02 10:12:27 crc kubenswrapper[4685]: I1202 10:12:27.772840 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" event={"ID":"703127c8-f8d9-4be4-a81e-0ab75293993d","Type":"ContainerStarted","Data":"dba20bd5e6303f098011d45ae967d726dc4ab2cf27de856de8aceb2960267856"} Dec 02 10:12:27 crc kubenswrapper[4685]: I1202 10:12:27.775154 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-mttcn_a09db319-6f1b-4944-8097-6df042ad0869/kube-multus/0.log" Dec 02 10:12:27 crc kubenswrapper[4685]: I1202 10:12:27.775189 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-mttcn" event={"ID":"a09db319-6f1b-4944-8097-6df042ad0869","Type":"ContainerStarted","Data":"d66c5c21462934c31baaf1a22ce61c6acd635902b579c542a1283c082f76fcf2"} Dec 02 10:12:27 crc kubenswrapper[4685]: I1202 10:12:27.910656 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe" path="/var/lib/kubelet/pods/1cbce0ec-53d7-4ea3-a7ea-eeaf234724fe/volumes" Dec 02 10:12:29 crc kubenswrapper[4685]: I1202 10:12:29.793291 4685 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" event={"ID":"703127c8-f8d9-4be4-a81e-0ab75293993d","Type":"ContainerStarted","Data":"6d535da420cd8257b2372a2966d8457a06aa64eb5fd8382ee7534a7bd8b464c8"} Dec 02 10:12:32 crc kubenswrapper[4685]: I1202 10:12:32.813890 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" event={"ID":"703127c8-f8d9-4be4-a81e-0ab75293993d","Type":"ContainerStarted","Data":"eb6d2257888562908ee8cc841f512d9010ff3d55959289b69a353659d839b167"} Dec 02 10:12:32 crc kubenswrapper[4685]: I1202 10:12:32.814533 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:32 crc kubenswrapper[4685]: I1202 10:12:32.814553 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:32 crc kubenswrapper[4685]: I1202 10:12:32.840264 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:32 crc kubenswrapper[4685]: I1202 10:12:32.847921 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" podStartSLOduration=6.847901841 podStartE2EDuration="6.847901841s" podCreationTimestamp="2025-12-02 10:12:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:12:32.845544206 +0000 UTC m=+645.217318380" watchObservedRunningTime="2025-12-02 10:12:32.847901841 +0000 UTC m=+645.219675985" Dec 02 10:12:33 crc kubenswrapper[4685]: I1202 10:12:33.821968 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:33 crc kubenswrapper[4685]: I1202 10:12:33.851542 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:12:45 crc kubenswrapper[4685]: I1202 10:12:45.045826 4685 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 02 10:12:56 crc kubenswrapper[4685]: I1202 10:12:56.458016 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5l8lt" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.126054 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq"] Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.127504 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.133523 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.135742 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq"] Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.264731 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.264775 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8g42\" (UniqueName: \"kubernetes.io/projected/ae4a80df-9f50-4df0-8376-975da5b85e38-kube-api-access-g8g42\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.264800 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.366357 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.366666 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8g42\" (UniqueName: \"kubernetes.io/projected/ae4a80df-9f50-4df0-8376-975da5b85e38-kube-api-access-g8g42\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.366766 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.366962 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.367306 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.389229 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8g42\" (UniqueName: \"kubernetes.io/projected/ae4a80df-9f50-4df0-8376-975da5b85e38-kube-api-access-g8g42\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.444187 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:06 crc kubenswrapper[4685]: I1202 10:13:06.630848 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq"] Dec 02 10:13:06 crc kubenswrapper[4685]: W1202 10:13:06.637672 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae4a80df_9f50_4df0_8376_975da5b85e38.slice/crio-4fe896b0deadba141a7a502d17d24b97520f53d353dc56fa8aaae6134d5d6694 WatchSource:0}: Error finding container 4fe896b0deadba141a7a502d17d24b97520f53d353dc56fa8aaae6134d5d6694: Status 404 returned error can't find the container with id 4fe896b0deadba141a7a502d17d24b97520f53d353dc56fa8aaae6134d5d6694 Dec 02 10:13:07 crc kubenswrapper[4685]: I1202 10:13:07.003893 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" event={"ID":"ae4a80df-9f50-4df0-8376-975da5b85e38","Type":"ContainerStarted","Data":"192096c5b6b3920031aff6879ab167ba425a5e9fe1672d510eb10c232b6276d7"} Dec 02 10:13:07 crc kubenswrapper[4685]: I1202 10:13:07.004192 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" event={"ID":"ae4a80df-9f50-4df0-8376-975da5b85e38","Type":"ContainerStarted","Data":"4fe896b0deadba141a7a502d17d24b97520f53d353dc56fa8aaae6134d5d6694"} Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.012774 4685 generic.go:334] "Generic (PLEG): container finished" podID="ae4a80df-9f50-4df0-8376-975da5b85e38" containerID="192096c5b6b3920031aff6879ab167ba425a5e9fe1672d510eb10c232b6276d7" exitCode=0 Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.012818 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" event={"ID":"ae4a80df-9f50-4df0-8376-975da5b85e38","Type":"ContainerDied","Data":"192096c5b6b3920031aff6879ab167ba425a5e9fe1672d510eb10c232b6276d7"} Dec 02 10:13:08 crc 
kubenswrapper[4685]: I1202 10:13:08.476579 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dzwgq"] Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.478848 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.484078 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dzwgq"] Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.592143 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gcgm\" (UniqueName: \"kubernetes.io/projected/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-kube-api-access-7gcgm\") pod \"redhat-operators-dzwgq\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.592226 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-utilities\") pod \"redhat-operators-dzwgq\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.592303 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-catalog-content\") pod \"redhat-operators-dzwgq\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.693328 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-utilities\") pod \"redhat-operators-dzwgq\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.693402 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-catalog-content\") pod \"redhat-operators-dzwgq\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.693442 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gcgm\" (UniqueName: \"kubernetes.io/projected/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-kube-api-access-7gcgm\") pod \"redhat-operators-dzwgq\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.693907 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-utilities\") pod \"redhat-operators-dzwgq\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.693985 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-catalog-content\") pod 
\"redhat-operators-dzwgq\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.718668 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gcgm\" (UniqueName: \"kubernetes.io/projected/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-kube-api-access-7gcgm\") pod \"redhat-operators-dzwgq\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.794870 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:08 crc kubenswrapper[4685]: I1202 10:13:08.990331 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dzwgq"] Dec 02 10:13:09 crc kubenswrapper[4685]: I1202 10:13:09.021189 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzwgq" event={"ID":"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99","Type":"ContainerStarted","Data":"d2bf721550f3b5009543d4478a9df646ab27dd9396c173017b518b0fe9269f70"} Dec 02 10:13:10 crc kubenswrapper[4685]: I1202 10:13:10.032502 4685 generic.go:334] "Generic (PLEG): container finished" podID="ae4a80df-9f50-4df0-8376-975da5b85e38" containerID="8052c8486a8641bd8b8bbc27983b7404598b855f6fabd7af74a283a108453242" exitCode=0 Dec 02 10:13:10 crc kubenswrapper[4685]: I1202 10:13:10.032611 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" event={"ID":"ae4a80df-9f50-4df0-8376-975da5b85e38","Type":"ContainerDied","Data":"8052c8486a8641bd8b8bbc27983b7404598b855f6fabd7af74a283a108453242"} Dec 02 10:13:10 crc kubenswrapper[4685]: I1202 10:13:10.035274 4685 generic.go:334] "Generic (PLEG): container finished" podID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" containerID="534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82" exitCode=0 Dec 02 10:13:10 crc kubenswrapper[4685]: I1202 10:13:10.035310 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzwgq" event={"ID":"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99","Type":"ContainerDied","Data":"534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82"} Dec 02 10:13:11 crc kubenswrapper[4685]: I1202 10:13:11.042533 4685 generic.go:334] "Generic (PLEG): container finished" podID="ae4a80df-9f50-4df0-8376-975da5b85e38" containerID="2ca78c0841de31e680933539d4f6e36b51a1ecf4fefc2c23ad9c489f6381d052" exitCode=0 Dec 02 10:13:11 crc kubenswrapper[4685]: I1202 10:13:11.042666 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" event={"ID":"ae4a80df-9f50-4df0-8376-975da5b85e38","Type":"ContainerDied","Data":"2ca78c0841de31e680933539d4f6e36b51a1ecf4fefc2c23ad9c489f6381d052"} Dec 02 10:13:11 crc kubenswrapper[4685]: I1202 10:13:11.045732 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzwgq" event={"ID":"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99","Type":"ContainerStarted","Data":"2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f"} Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.056469 4685 generic.go:334] "Generic (PLEG): container finished" podID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" 
containerID="2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f" exitCode=0 Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.056606 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzwgq" event={"ID":"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99","Type":"ContainerDied","Data":"2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f"} Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.147706 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.147779 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.296379 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.446508 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-util\") pod \"ae4a80df-9f50-4df0-8376-975da5b85e38\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.446662 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8g42\" (UniqueName: \"kubernetes.io/projected/ae4a80df-9f50-4df0-8376-975da5b85e38-kube-api-access-g8g42\") pod \"ae4a80df-9f50-4df0-8376-975da5b85e38\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.446707 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-bundle\") pod \"ae4a80df-9f50-4df0-8376-975da5b85e38\" (UID: \"ae4a80df-9f50-4df0-8376-975da5b85e38\") " Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.447847 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-bundle" (OuterVolumeSpecName: "bundle") pod "ae4a80df-9f50-4df0-8376-975da5b85e38" (UID: "ae4a80df-9f50-4df0-8376-975da5b85e38"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.453970 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae4a80df-9f50-4df0-8376-975da5b85e38-kube-api-access-g8g42" (OuterVolumeSpecName: "kube-api-access-g8g42") pod "ae4a80df-9f50-4df0-8376-975da5b85e38" (UID: "ae4a80df-9f50-4df0-8376-975da5b85e38"). InnerVolumeSpecName "kube-api-access-g8g42". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.514520 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-util" (OuterVolumeSpecName: "util") pod "ae4a80df-9f50-4df0-8376-975da5b85e38" (UID: "ae4a80df-9f50-4df0-8376-975da5b85e38"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.548203 4685 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.548236 4685 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ae4a80df-9f50-4df0-8376-975da5b85e38-util\") on node \"crc\" DevicePath \"\"" Dec 02 10:13:12 crc kubenswrapper[4685]: I1202 10:13:12.548246 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8g42\" (UniqueName: \"kubernetes.io/projected/ae4a80df-9f50-4df0-8376-975da5b85e38-kube-api-access-g8g42\") on node \"crc\" DevicePath \"\"" Dec 02 10:13:13 crc kubenswrapper[4685]: I1202 10:13:13.068538 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" event={"ID":"ae4a80df-9f50-4df0-8376-975da5b85e38","Type":"ContainerDied","Data":"4fe896b0deadba141a7a502d17d24b97520f53d353dc56fa8aaae6134d5d6694"} Dec 02 10:13:13 crc kubenswrapper[4685]: I1202 10:13:13.068599 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4fe896b0deadba141a7a502d17d24b97520f53d353dc56fa8aaae6134d5d6694" Dec 02 10:13:13 crc kubenswrapper[4685]: I1202 10:13:13.068645 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq" Dec 02 10:13:14 crc kubenswrapper[4685]: I1202 10:13:14.075952 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzwgq" event={"ID":"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99","Type":"ContainerStarted","Data":"f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858"} Dec 02 10:13:14 crc kubenswrapper[4685]: I1202 10:13:14.099480 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dzwgq" podStartSLOduration=3.286641297 podStartE2EDuration="6.09946102s" podCreationTimestamp="2025-12-02 10:13:08 +0000 UTC" firstStartedPulling="2025-12-02 10:13:10.037502438 +0000 UTC m=+682.409276632" lastFinishedPulling="2025-12-02 10:13:12.850322181 +0000 UTC m=+685.222096355" observedRunningTime="2025-12-02 10:13:14.097348263 +0000 UTC m=+686.469122427" watchObservedRunningTime="2025-12-02 10:13:14.09946102 +0000 UTC m=+686.471235174" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.472728 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz"] Dec 02 10:13:17 crc kubenswrapper[4685]: E1202 10:13:17.473272 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae4a80df-9f50-4df0-8376-975da5b85e38" containerName="util" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.473289 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae4a80df-9f50-4df0-8376-975da5b85e38" containerName="util" Dec 02 10:13:17 crc kubenswrapper[4685]: E1202 10:13:17.473305 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae4a80df-9f50-4df0-8376-975da5b85e38" containerName="extract" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.473312 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae4a80df-9f50-4df0-8376-975da5b85e38" containerName="extract" Dec 02 10:13:17 crc kubenswrapper[4685]: E1202 10:13:17.473325 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae4a80df-9f50-4df0-8376-975da5b85e38" containerName="pull" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.473333 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae4a80df-9f50-4df0-8376-975da5b85e38" containerName="pull" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.473468 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae4a80df-9f50-4df0-8376-975da5b85e38" containerName="extract" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.473973 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.476362 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-f8hcb" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.478526 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.478669 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.503206 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz"] Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.615846 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g5z9\" (UniqueName: \"kubernetes.io/projected/4b250238-0336-4009-b01e-2a469bf59e33-kube-api-access-4g5z9\") pod \"nmstate-operator-5b5b58f5c8-wklzz\" (UID: \"4b250238-0336-4009-b01e-2a469bf59e33\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.716892 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g5z9\" (UniqueName: \"kubernetes.io/projected/4b250238-0336-4009-b01e-2a469bf59e33-kube-api-access-4g5z9\") pod \"nmstate-operator-5b5b58f5c8-wklzz\" (UID: \"4b250238-0336-4009-b01e-2a469bf59e33\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.735596 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g5z9\" (UniqueName: \"kubernetes.io/projected/4b250238-0336-4009-b01e-2a469bf59e33-kube-api-access-4g5z9\") pod \"nmstate-operator-5b5b58f5c8-wklzz\" (UID: \"4b250238-0336-4009-b01e-2a469bf59e33\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz" Dec 02 10:13:17 crc kubenswrapper[4685]: I1202 10:13:17.807996 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz" Dec 02 10:13:18 crc kubenswrapper[4685]: I1202 10:13:18.243723 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz"] Dec 02 10:13:18 crc kubenswrapper[4685]: I1202 10:13:18.795719 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:18 crc kubenswrapper[4685]: I1202 10:13:18.795772 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:18 crc kubenswrapper[4685]: I1202 10:13:18.830026 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:19 crc kubenswrapper[4685]: I1202 10:13:19.102371 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz" event={"ID":"4b250238-0336-4009-b01e-2a469bf59e33","Type":"ContainerStarted","Data":"79db3cbbc946273f3ba1394c7ea4fca1b487881c5ebed3bc8e1c172da9955804"} Dec 02 10:13:19 crc kubenswrapper[4685]: I1202 10:13:19.154133 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.061643 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dzwgq"] Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.114481 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz" event={"ID":"4b250238-0336-4009-b01e-2a469bf59e33","Type":"ContainerStarted","Data":"64c2fdba3e827efaf8809d7f196064ed7fb5cf557faf5953687b00d73f5bca21"} Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.114683 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dzwgq" podUID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" containerName="registry-server" containerID="cri-o://f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858" gracePeriod=2 Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.137583 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-wklzz" podStartSLOduration=1.617665986 podStartE2EDuration="4.13754532s" podCreationTimestamp="2025-12-02 10:13:17 +0000 UTC" firstStartedPulling="2025-12-02 10:13:18.252850203 +0000 UTC m=+690.624624357" lastFinishedPulling="2025-12-02 10:13:20.772729537 +0000 UTC m=+693.144503691" observedRunningTime="2025-12-02 10:13:21.128416459 +0000 UTC m=+693.500190613" watchObservedRunningTime="2025-12-02 10:13:21.13754532 +0000 UTC m=+693.509319474" Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.441466 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.464303 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-catalog-content\") pod \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.464367 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-utilities\") pod \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.464415 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gcgm\" (UniqueName: \"kubernetes.io/projected/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-kube-api-access-7gcgm\") pod \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\" (UID: \"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99\") " Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.465276 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-utilities" (OuterVolumeSpecName: "utilities") pod "3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" (UID: "3e6e7bb9-a9ac-42bc-8f2d-7120da26be99"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.470940 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-kube-api-access-7gcgm" (OuterVolumeSpecName: "kube-api-access-7gcgm") pod "3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" (UID: "3e6e7bb9-a9ac-42bc-8f2d-7120da26be99"). InnerVolumeSpecName "kube-api-access-7gcgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.566339 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gcgm\" (UniqueName: \"kubernetes.io/projected/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-kube-api-access-7gcgm\") on node \"crc\" DevicePath \"\"" Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.566683 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.600314 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" (UID: "3e6e7bb9-a9ac-42bc-8f2d-7120da26be99"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:13:21 crc kubenswrapper[4685]: I1202 10:13:21.667793 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.126120 4685 generic.go:334] "Generic (PLEG): container finished" podID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" containerID="f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858" exitCode=0 Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.126220 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzwgq" event={"ID":"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99","Type":"ContainerDied","Data":"f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858"} Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.126235 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dzwgq" Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.126289 4685 scope.go:117] "RemoveContainer" containerID="f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858" Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.126275 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dzwgq" event={"ID":"3e6e7bb9-a9ac-42bc-8f2d-7120da26be99","Type":"ContainerDied","Data":"d2bf721550f3b5009543d4478a9df646ab27dd9396c173017b518b0fe9269f70"} Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.149690 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dzwgq"] Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.153144 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dzwgq"] Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.156302 4685 scope.go:117] "RemoveContainer" containerID="2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f" Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.193001 4685 scope.go:117] "RemoveContainer" containerID="534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82" Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.208766 4685 scope.go:117] "RemoveContainer" containerID="f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858" Dec 02 10:13:22 crc kubenswrapper[4685]: E1202 10:13:22.209442 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858\": container with ID starting with f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858 not found: ID does not exist" containerID="f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858" Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.209519 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858"} err="failed to get container status \"f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858\": rpc error: code = NotFound desc = could not find container \"f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858\": container with ID starting with f0a44b564321f8ced50069b9728315e41516393cf6ee7ffc55498796f75e8858 not found: ID does not exist" Dec 02 10:13:22 crc 
kubenswrapper[4685]: I1202 10:13:22.209686 4685 scope.go:117] "RemoveContainer" containerID="2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f" Dec 02 10:13:22 crc kubenswrapper[4685]: E1202 10:13:22.210283 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f\": container with ID starting with 2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f not found: ID does not exist" containerID="2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f" Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.210461 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f"} err="failed to get container status \"2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f\": rpc error: code = NotFound desc = could not find container \"2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f\": container with ID starting with 2e548042fccb5e55cd7ee8eb7f653c8c2d2bf1030de60dcdd668921d96ebb25f not found: ID does not exist" Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.210644 4685 scope.go:117] "RemoveContainer" containerID="534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82" Dec 02 10:13:22 crc kubenswrapper[4685]: E1202 10:13:22.211120 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82\": container with ID starting with 534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82 not found: ID does not exist" containerID="534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82" Dec 02 10:13:22 crc kubenswrapper[4685]: I1202 10:13:22.211183 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82"} err="failed to get container status \"534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82\": rpc error: code = NotFound desc = could not find container \"534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82\": container with ID starting with 534e62e0ea0b9d1888439e89993091592106f1f75303859193c95d7aef6d0c82 not found: ID does not exist" Dec 02 10:13:23 crc kubenswrapper[4685]: I1202 10:13:23.911781 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" path="/var/lib/kubelet/pods/3e6e7bb9-a9ac-42bc-8f2d-7120da26be99/volumes" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.591342 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw"] Dec 02 10:13:26 crc kubenswrapper[4685]: E1202 10:13:26.591636 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" containerName="extract-utilities" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.591648 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" containerName="extract-utilities" Dec 02 10:13:26 crc kubenswrapper[4685]: E1202 10:13:26.591664 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" containerName="extract-content" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.591670 4685 
state_mem.go:107] "Deleted CPUSet assignment" podUID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" containerName="extract-content" Dec 02 10:13:26 crc kubenswrapper[4685]: E1202 10:13:26.591679 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" containerName="registry-server" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.591685 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" containerName="registry-server" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.591786 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e6e7bb9-a9ac-42bc-8f2d-7120da26be99" containerName="registry-server" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.592317 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.595095 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-nprgb" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.624767 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8"] Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.625653 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.631406 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.639912 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97tpm\" (UniqueName: \"kubernetes.io/projected/b7d57eeb-24b9-461f-ab36-86ffcf831603-kube-api-access-97tpm\") pod \"nmstate-metrics-7f946cbc9-zshhw\" (UID: \"b7d57eeb-24b9-461f-ab36-86ffcf831603\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.653621 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-wbwxh"] Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.654358 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.660450 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8"] Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.730723 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw"] Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.741016 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/dfaa292c-cbfc-495c-8b18-768606608d14-dbus-socket\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.741060 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/dfaa292c-cbfc-495c-8b18-768606608d14-nmstate-lock\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.741088 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5989\" (UniqueName: \"kubernetes.io/projected/8d950d2d-44b3-4441-9434-11fc6a39dca4-kube-api-access-p5989\") pod \"nmstate-webhook-5f6d4c5ccb-2khp8\" (UID: \"8d950d2d-44b3-4441-9434-11fc6a39dca4\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.741110 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4974\" (UniqueName: \"kubernetes.io/projected/dfaa292c-cbfc-495c-8b18-768606608d14-kube-api-access-z4974\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.741151 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/dfaa292c-cbfc-495c-8b18-768606608d14-ovs-socket\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.741174 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97tpm\" (UniqueName: \"kubernetes.io/projected/b7d57eeb-24b9-461f-ab36-86ffcf831603-kube-api-access-97tpm\") pod \"nmstate-metrics-7f946cbc9-zshhw\" (UID: \"b7d57eeb-24b9-461f-ab36-86ffcf831603\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.741190 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/8d950d2d-44b3-4441-9434-11fc6a39dca4-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-2khp8\" (UID: \"8d950d2d-44b3-4441-9434-11fc6a39dca4\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.781035 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97tpm\" (UniqueName: 
\"kubernetes.io/projected/b7d57eeb-24b9-461f-ab36-86ffcf831603-kube-api-access-97tpm\") pod \"nmstate-metrics-7f946cbc9-zshhw\" (UID: \"b7d57eeb-24b9-461f-ab36-86ffcf831603\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.842831 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/dfaa292c-cbfc-495c-8b18-768606608d14-dbus-socket\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.842890 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/dfaa292c-cbfc-495c-8b18-768606608d14-nmstate-lock\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.842922 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5989\" (UniqueName: \"kubernetes.io/projected/8d950d2d-44b3-4441-9434-11fc6a39dca4-kube-api-access-p5989\") pod \"nmstate-webhook-5f6d4c5ccb-2khp8\" (UID: \"8d950d2d-44b3-4441-9434-11fc6a39dca4\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.842941 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4974\" (UniqueName: \"kubernetes.io/projected/dfaa292c-cbfc-495c-8b18-768606608d14-kube-api-access-z4974\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.842995 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/dfaa292c-cbfc-495c-8b18-768606608d14-ovs-socket\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.843098 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/dfaa292c-cbfc-495c-8b18-768606608d14-ovs-socket\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.843104 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/dfaa292c-cbfc-495c-8b18-768606608d14-dbus-socket\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.843177 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/dfaa292c-cbfc-495c-8b18-768606608d14-nmstate-lock\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.843398 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/8d950d2d-44b3-4441-9434-11fc6a39dca4-tls-key-pair\") pod 
\"nmstate-webhook-5f6d4c5ccb-2khp8\" (UID: \"8d950d2d-44b3-4441-9434-11fc6a39dca4\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.847005 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/8d950d2d-44b3-4441-9434-11fc6a39dca4-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-2khp8\" (UID: \"8d950d2d-44b3-4441-9434-11fc6a39dca4\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.861024 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4974\" (UniqueName: \"kubernetes.io/projected/dfaa292c-cbfc-495c-8b18-768606608d14-kube-api-access-z4974\") pod \"nmstate-handler-wbwxh\" (UID: \"dfaa292c-cbfc-495c-8b18-768606608d14\") " pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.868066 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5989\" (UniqueName: \"kubernetes.io/projected/8d950d2d-44b3-4441-9434-11fc6a39dca4-kube-api-access-p5989\") pod \"nmstate-webhook-5f6d4c5ccb-2khp8\" (UID: \"8d950d2d-44b3-4441-9434-11fc6a39dca4\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.875843 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q"] Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.876534 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.883859 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.883907 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-mz5nc" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.884542 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.893345 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q"] Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.908520 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.937293 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.944918 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c8a900b2-3134-445e-8349-4ffab8e264bb-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-dtq9q\" (UID: \"c8a900b2-3134-445e-8349-4ffab8e264bb\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.944958 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7cwd\" (UniqueName: \"kubernetes.io/projected/c8a900b2-3134-445e-8349-4ffab8e264bb-kube-api-access-w7cwd\") pod \"nmstate-console-plugin-7fbb5f6569-dtq9q\" (UID: \"c8a900b2-3134-445e-8349-4ffab8e264bb\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.945038 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c8a900b2-3134-445e-8349-4ffab8e264bb-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-dtq9q\" (UID: \"c8a900b2-3134-445e-8349-4ffab8e264bb\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:26 crc kubenswrapper[4685]: I1202 10:13:26.966046 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.046099 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c8a900b2-3134-445e-8349-4ffab8e264bb-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-dtq9q\" (UID: \"c8a900b2-3134-445e-8349-4ffab8e264bb\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.046403 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7cwd\" (UniqueName: \"kubernetes.io/projected/c8a900b2-3134-445e-8349-4ffab8e264bb-kube-api-access-w7cwd\") pod \"nmstate-console-plugin-7fbb5f6569-dtq9q\" (UID: \"c8a900b2-3134-445e-8349-4ffab8e264bb\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.046456 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c8a900b2-3134-445e-8349-4ffab8e264bb-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-dtq9q\" (UID: \"c8a900b2-3134-445e-8349-4ffab8e264bb\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:27 crc kubenswrapper[4685]: E1202 10:13:27.046280 4685 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 02 10:13:27 crc kubenswrapper[4685]: E1202 10:13:27.046583 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8a900b2-3134-445e-8349-4ffab8e264bb-plugin-serving-cert podName:c8a900b2-3134-445e-8349-4ffab8e264bb nodeName:}" failed. No retries permitted until 2025-12-02 10:13:27.546544808 +0000 UTC m=+699.918318962 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/c8a900b2-3134-445e-8349-4ffab8e264bb-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-dtq9q" (UID: "c8a900b2-3134-445e-8349-4ffab8e264bb") : secret "plugin-serving-cert" not found Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.047317 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c8a900b2-3134-445e-8349-4ffab8e264bb-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-dtq9q\" (UID: \"c8a900b2-3134-445e-8349-4ffab8e264bb\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.080641 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7cwd\" (UniqueName: \"kubernetes.io/projected/c8a900b2-3134-445e-8349-4ffab8e264bb-kube-api-access-w7cwd\") pod \"nmstate-console-plugin-7fbb5f6569-dtq9q\" (UID: \"c8a900b2-3134-445e-8349-4ffab8e264bb\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.113132 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6cc7fc45d4-m82dj"] Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.113762 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.131589 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6cc7fc45d4-m82dj"] Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.153693 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7fe422aa-f408-4324-914a-59f46f5bc11f-console-serving-cert\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.153734 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7fe422aa-f408-4324-914a-59f46f5bc11f-console-oauth-config\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.153757 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-console-config\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.153780 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-oauth-serving-cert\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.153850 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckm25\" (UniqueName: 
\"kubernetes.io/projected/7fe422aa-f408-4324-914a-59f46f5bc11f-kube-api-access-ckm25\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.153867 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-service-ca\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.153888 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-trusted-ca-bundle\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.165295 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-wbwxh" event={"ID":"dfaa292c-cbfc-495c-8b18-768606608d14","Type":"ContainerStarted","Data":"220c144f0518e42e2e871a943847fc5c0cc201184f906265ab6fe3d2c05f2308"} Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.229287 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8"] Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.255384 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckm25\" (UniqueName: \"kubernetes.io/projected/7fe422aa-f408-4324-914a-59f46f5bc11f-kube-api-access-ckm25\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.255427 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-service-ca\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.255459 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-trusted-ca-bundle\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.255484 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7fe422aa-f408-4324-914a-59f46f5bc11f-console-serving-cert\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.255500 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7fe422aa-f408-4324-914a-59f46f5bc11f-console-oauth-config\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 
10:13:27.255522 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-console-config\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.255544 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-oauth-serving-cert\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.256823 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-oauth-serving-cert\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.256962 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-service-ca\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.257194 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-trusted-ca-bundle\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.257614 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7fe422aa-f408-4324-914a-59f46f5bc11f-console-config\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.259517 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7fe422aa-f408-4324-914a-59f46f5bc11f-console-serving-cert\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.261163 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7fe422aa-f408-4324-914a-59f46f5bc11f-console-oauth-config\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.270782 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckm25\" (UniqueName: \"kubernetes.io/projected/7fe422aa-f408-4324-914a-59f46f5bc11f-kube-api-access-ckm25\") pod \"console-6cc7fc45d4-m82dj\" (UID: \"7fe422aa-f408-4324-914a-59f46f5bc11f\") " pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.382306 4685 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw"] Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.454914 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.558547 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c8a900b2-3134-445e-8349-4ffab8e264bb-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-dtq9q\" (UID: \"c8a900b2-3134-445e-8349-4ffab8e264bb\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.562530 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c8a900b2-3134-445e-8349-4ffab8e264bb-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-dtq9q\" (UID: \"c8a900b2-3134-445e-8349-4ffab8e264bb\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.645737 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6cc7fc45d4-m82dj"] Dec 02 10:13:27 crc kubenswrapper[4685]: W1202 10:13:27.651589 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fe422aa_f408_4324_914a_59f46f5bc11f.slice/crio-8d9699061960170e70dd39b4c3071a1819dee1d82863100db3840b3ae2a46f76 WatchSource:0}: Error finding container 8d9699061960170e70dd39b4c3071a1819dee1d82863100db3840b3ae2a46f76: Status 404 returned error can't find the container with id 8d9699061960170e70dd39b4c3071a1819dee1d82863100db3840b3ae2a46f76 Dec 02 10:13:27 crc kubenswrapper[4685]: I1202 10:13:27.808607 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" Dec 02 10:13:28 crc kubenswrapper[4685]: I1202 10:13:28.034990 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q"] Dec 02 10:13:28 crc kubenswrapper[4685]: I1202 10:13:28.172271 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6cc7fc45d4-m82dj" event={"ID":"7fe422aa-f408-4324-914a-59f46f5bc11f","Type":"ContainerStarted","Data":"fb144512ccaeb63fce2d672b6fe1ca27c61e43d8a36030891f97ebec806c791e"} Dec 02 10:13:28 crc kubenswrapper[4685]: I1202 10:13:28.172317 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6cc7fc45d4-m82dj" event={"ID":"7fe422aa-f408-4324-914a-59f46f5bc11f","Type":"ContainerStarted","Data":"8d9699061960170e70dd39b4c3071a1819dee1d82863100db3840b3ae2a46f76"} Dec 02 10:13:28 crc kubenswrapper[4685]: I1202 10:13:28.173764 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw" event={"ID":"b7d57eeb-24b9-461f-ab36-86ffcf831603","Type":"ContainerStarted","Data":"40318ad215b58ed320b4f091475def5a498c7872efd85ae5c0860424887346ff"} Dec 02 10:13:28 crc kubenswrapper[4685]: I1202 10:13:28.175108 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" event={"ID":"8d950d2d-44b3-4441-9434-11fc6a39dca4","Type":"ContainerStarted","Data":"cf4d3d11a030cce2cdda0d7e11f6c18475645f66a69ed140a5f3c386156007ef"} Dec 02 10:13:28 crc kubenswrapper[4685]: I1202 10:13:28.177001 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" event={"ID":"c8a900b2-3134-445e-8349-4ffab8e264bb","Type":"ContainerStarted","Data":"2a397507ce839ffc604e53b3b88b76114f5383cea1c05835059d18de30fbb08e"} Dec 02 10:13:28 crc kubenswrapper[4685]: I1202 10:13:28.194384 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6cc7fc45d4-m82dj" podStartSLOduration=1.194369604 podStartE2EDuration="1.194369604s" podCreationTimestamp="2025-12-02 10:13:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:13:28.189944382 +0000 UTC m=+700.561718536" watchObservedRunningTime="2025-12-02 10:13:28.194369604 +0000 UTC m=+700.566143758" Dec 02 10:13:30 crc kubenswrapper[4685]: I1202 10:13:30.188481 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw" event={"ID":"b7d57eeb-24b9-461f-ab36-86ffcf831603","Type":"ContainerStarted","Data":"434433cbed53406fe7181ece6c4c8cbfa12cbc8fa21876ba9973e5287024bc9a"} Dec 02 10:13:30 crc kubenswrapper[4685]: I1202 10:13:30.190269 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" event={"ID":"8d950d2d-44b3-4441-9434-11fc6a39dca4","Type":"ContainerStarted","Data":"355ca15da4bc46f5532597cafd48030d18d7c457c170525bbebbd74f1734a031"} Dec 02 10:13:30 crc kubenswrapper[4685]: I1202 10:13:30.190399 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" Dec 02 10:13:30 crc kubenswrapper[4685]: I1202 10:13:30.193077 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-wbwxh" 
event={"ID":"dfaa292c-cbfc-495c-8b18-768606608d14","Type":"ContainerStarted","Data":"2cacee73b52a2b0d9ffe0e49949e63190f53ccdfbdd2a4e599a1f57a8909e9df"} Dec 02 10:13:30 crc kubenswrapper[4685]: I1202 10:13:30.193521 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:30 crc kubenswrapper[4685]: I1202 10:13:30.204756 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" podStartSLOduration=1.866666561 podStartE2EDuration="4.204736318s" podCreationTimestamp="2025-12-02 10:13:26 +0000 UTC" firstStartedPulling="2025-12-02 10:13:27.238100211 +0000 UTC m=+699.609874365" lastFinishedPulling="2025-12-02 10:13:29.576169968 +0000 UTC m=+701.947944122" observedRunningTime="2025-12-02 10:13:30.202295371 +0000 UTC m=+702.574069525" watchObservedRunningTime="2025-12-02 10:13:30.204736318 +0000 UTC m=+702.576510472" Dec 02 10:13:30 crc kubenswrapper[4685]: I1202 10:13:30.223252 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-wbwxh" podStartSLOduration=1.700146475 podStartE2EDuration="4.223235396s" podCreationTimestamp="2025-12-02 10:13:26 +0000 UTC" firstStartedPulling="2025-12-02 10:13:27.008082921 +0000 UTC m=+699.379857075" lastFinishedPulling="2025-12-02 10:13:29.531171842 +0000 UTC m=+701.902945996" observedRunningTime="2025-12-02 10:13:30.220683116 +0000 UTC m=+702.592457300" watchObservedRunningTime="2025-12-02 10:13:30.223235396 +0000 UTC m=+702.595009540" Dec 02 10:13:31 crc kubenswrapper[4685]: I1202 10:13:31.204391 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" event={"ID":"c8a900b2-3134-445e-8349-4ffab8e264bb","Type":"ContainerStarted","Data":"dfdac5c8a81f55b6425177a95ae4d6b538991ff7ac1066215e802f7366fc0055"} Dec 02 10:13:31 crc kubenswrapper[4685]: I1202 10:13:31.223497 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dtq9q" podStartSLOduration=2.793098425 podStartE2EDuration="5.223481298s" podCreationTimestamp="2025-12-02 10:13:26 +0000 UTC" firstStartedPulling="2025-12-02 10:13:28.051289293 +0000 UTC m=+700.423063447" lastFinishedPulling="2025-12-02 10:13:30.481672166 +0000 UTC m=+702.853446320" observedRunningTime="2025-12-02 10:13:31.219300303 +0000 UTC m=+703.591074457" watchObservedRunningTime="2025-12-02 10:13:31.223481298 +0000 UTC m=+703.595255452" Dec 02 10:13:32 crc kubenswrapper[4685]: I1202 10:13:32.209298 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw" event={"ID":"b7d57eeb-24b9-461f-ab36-86ffcf831603","Type":"ContainerStarted","Data":"d39fc7f67e77c3321905b1fece712f2529e53bb758498c4ce29c9583c65f4f22"} Dec 02 10:13:32 crc kubenswrapper[4685]: I1202 10:13:32.234258 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-zshhw" podStartSLOduration=1.573924146 podStartE2EDuration="6.234237218s" podCreationTimestamp="2025-12-02 10:13:26 +0000 UTC" firstStartedPulling="2025-12-02 10:13:27.385910521 +0000 UTC m=+699.757684675" lastFinishedPulling="2025-12-02 10:13:32.046223583 +0000 UTC m=+704.417997747" observedRunningTime="2025-12-02 10:13:32.225148279 +0000 UTC m=+704.596922443" watchObservedRunningTime="2025-12-02 10:13:32.234237218 +0000 UTC m=+704.606011372" Dec 02 10:13:36 crc 
kubenswrapper[4685]: I1202 10:13:36.998677 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-wbwxh" Dec 02 10:13:37 crc kubenswrapper[4685]: I1202 10:13:37.456384 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:37 crc kubenswrapper[4685]: I1202 10:13:37.456488 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:37 crc kubenswrapper[4685]: I1202 10:13:37.469455 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:38 crc kubenswrapper[4685]: I1202 10:13:38.248156 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6cc7fc45d4-m82dj" Dec 02 10:13:38 crc kubenswrapper[4685]: I1202 10:13:38.314267 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-hgnrz"] Dec 02 10:13:42 crc kubenswrapper[4685]: I1202 10:13:42.147803 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:13:42 crc kubenswrapper[4685]: I1202 10:13:42.148293 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:13:46 crc kubenswrapper[4685]: I1202 10:13:46.946154 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-2khp8" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.659124 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc"] Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.660637 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.662714 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.675193 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc"] Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.791674 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.791763 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmzzt\" (UniqueName: \"kubernetes.io/projected/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-kube-api-access-kmzzt\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.791816 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.893254 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmzzt\" (UniqueName: \"kubernetes.io/projected/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-kube-api-access-kmzzt\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.893322 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.893378 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.893920 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.893928 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.917821 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmzzt\" (UniqueName: \"kubernetes.io/projected/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-kube-api-access-kmzzt\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:13:59 crc kubenswrapper[4685]: I1202 10:13:59.986934 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:14:00 crc kubenswrapper[4685]: I1202 10:14:00.387980 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc"] Dec 02 10:14:01 crc kubenswrapper[4685]: I1202 10:14:01.381793 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" event={"ID":"a8daea87-dd6c-4d94-bc95-69ee4a7813a9","Type":"ContainerStarted","Data":"dc4ba646e133169ce16b66a3c39d11b4c4bea7210d921545ab0e904e6cd883fe"} Dec 02 10:14:01 crc kubenswrapper[4685]: I1202 10:14:01.383454 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" event={"ID":"a8daea87-dd6c-4d94-bc95-69ee4a7813a9","Type":"ContainerStarted","Data":"3e078540d684cdc3bd76c24444213202f3a992bc04248e7b601f80f117e0dfe3"} Dec 02 10:14:02 crc kubenswrapper[4685]: I1202 10:14:02.397767 4685 generic.go:334] "Generic (PLEG): container finished" podID="a8daea87-dd6c-4d94-bc95-69ee4a7813a9" containerID="dc4ba646e133169ce16b66a3c39d11b4c4bea7210d921545ab0e904e6cd883fe" exitCode=0 Dec 02 10:14:02 crc kubenswrapper[4685]: I1202 10:14:02.397931 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" event={"ID":"a8daea87-dd6c-4d94-bc95-69ee4a7813a9","Type":"ContainerDied","Data":"dc4ba646e133169ce16b66a3c39d11b4c4bea7210d921545ab0e904e6cd883fe"} Dec 02 10:14:03 crc kubenswrapper[4685]: I1202 10:14:03.363164 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-hgnrz" podUID="4e159366-bd40-43f3-9b3b-c818913c957a" containerName="console" containerID="cri-o://92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc" gracePeriod=15 Dec 02 10:14:03 crc kubenswrapper[4685]: I1202 10:14:03.864219 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-console_console-f9d7485db-hgnrz_4e159366-bd40-43f3-9b3b-c818913c957a/console/0.log" Dec 02 10:14:03 crc kubenswrapper[4685]: I1202 10:14:03.864988 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.049464 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-console-config\") pod \"4e159366-bd40-43f3-9b3b-c818913c957a\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.049921 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-oauth-config\") pod \"4e159366-bd40-43f3-9b3b-c818913c957a\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.050207 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-service-ca\") pod \"4e159366-bd40-43f3-9b3b-c818913c957a\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.050374 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9sr6\" (UniqueName: \"kubernetes.io/projected/4e159366-bd40-43f3-9b3b-c818913c957a-kube-api-access-x9sr6\") pod \"4e159366-bd40-43f3-9b3b-c818913c957a\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.050635 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-oauth-serving-cert\") pod \"4e159366-bd40-43f3-9b3b-c818913c957a\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.051177 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-serving-cert\") pod \"4e159366-bd40-43f3-9b3b-c818913c957a\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.051382 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-trusted-ca-bundle\") pod \"4e159366-bd40-43f3-9b3b-c818913c957a\" (UID: \"4e159366-bd40-43f3-9b3b-c818913c957a\") " Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.051597 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-console-config" (OuterVolumeSpecName: "console-config") pod "4e159366-bd40-43f3-9b3b-c818913c957a" (UID: "4e159366-bd40-43f3-9b3b-c818913c957a"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.052146 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-service-ca" (OuterVolumeSpecName: "service-ca") pod "4e159366-bd40-43f3-9b3b-c818913c957a" (UID: "4e159366-bd40-43f3-9b3b-c818913c957a"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.052158 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "4e159366-bd40-43f3-9b3b-c818913c957a" (UID: "4e159366-bd40-43f3-9b3b-c818913c957a"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.052428 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "4e159366-bd40-43f3-9b3b-c818913c957a" (UID: "4e159366-bd40-43f3-9b3b-c818913c957a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.056125 4685 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-service-ca\") on node \"crc\" DevicePath \"\"" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.056291 4685 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.056414 4685 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.056619 4685 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4e159366-bd40-43f3-9b3b-c818913c957a-console-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.057680 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "4e159366-bd40-43f3-9b3b-c818913c957a" (UID: "4e159366-bd40-43f3-9b3b-c818913c957a"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.058903 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e159366-bd40-43f3-9b3b-c818913c957a-kube-api-access-x9sr6" (OuterVolumeSpecName: "kube-api-access-x9sr6") pod "4e159366-bd40-43f3-9b3b-c818913c957a" (UID: "4e159366-bd40-43f3-9b3b-c818913c957a"). InnerVolumeSpecName "kube-api-access-x9sr6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.061296 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "4e159366-bd40-43f3-9b3b-c818913c957a" (UID: "4e159366-bd40-43f3-9b3b-c818913c957a"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.157729 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9sr6\" (UniqueName: \"kubernetes.io/projected/4e159366-bd40-43f3-9b3b-c818913c957a-kube-api-access-x9sr6\") on node \"crc\" DevicePath \"\"" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.157770 4685 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.157779 4685 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4e159366-bd40-43f3-9b3b-c818913c957a-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.413290 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-hgnrz_4e159366-bd40-43f3-9b3b-c818913c957a/console/0.log" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.413334 4685 generic.go:334] "Generic (PLEG): container finished" podID="4e159366-bd40-43f3-9b3b-c818913c957a" containerID="92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc" exitCode=2 Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.413384 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hgnrz" event={"ID":"4e159366-bd40-43f3-9b3b-c818913c957a","Type":"ContainerDied","Data":"92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc"} Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.413405 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-hgnrz" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.413420 4685 scope.go:117] "RemoveContainer" containerID="92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.413410 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-hgnrz" event={"ID":"4e159366-bd40-43f3-9b3b-c818913c957a","Type":"ContainerDied","Data":"953b50d8291904ca3a1fc81351b9bab761fa5960458702dec67b77fc1a5f33b4"} Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.417262 4685 generic.go:334] "Generic (PLEG): container finished" podID="a8daea87-dd6c-4d94-bc95-69ee4a7813a9" containerID="02777f99f77171f655a6d06e92735b82719e16eb1b3be1220659c0d37f07f619" exitCode=0 Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.417301 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" event={"ID":"a8daea87-dd6c-4d94-bc95-69ee4a7813a9","Type":"ContainerDied","Data":"02777f99f77171f655a6d06e92735b82719e16eb1b3be1220659c0d37f07f619"} Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.452631 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-hgnrz"] Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.457937 4685 scope.go:117] "RemoveContainer" containerID="92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc" Dec 02 10:14:04 crc kubenswrapper[4685]: E1202 10:14:04.458354 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc\": container with ID starting with 92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc not found: ID does not exist" containerID="92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.458405 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc"} err="failed to get container status \"92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc\": rpc error: code = NotFound desc = could not find container \"92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc\": container with ID starting with 92d073f19d15733718e40f35a56b63600fe5f58650cd2b15b416ca43400439dc not found: ID does not exist" Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.461658 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-hgnrz"] Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.721776 4685 patch_prober.go:28] interesting pod/console-f9d7485db-hgnrz container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/health\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 02 10:14:04 crc kubenswrapper[4685]: I1202 10:14:04.721886 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-hgnrz" podUID="4e159366-bd40-43f3-9b3b-c818913c957a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 02 
10:14:05 crc kubenswrapper[4685]: I1202 10:14:05.425847 4685 generic.go:334] "Generic (PLEG): container finished" podID="a8daea87-dd6c-4d94-bc95-69ee4a7813a9" containerID="6c96476c27edda3bf7df36f60b502a95fcdb5fc78a96794dfe2520e24ed3e8e9" exitCode=0 Dec 02 10:14:05 crc kubenswrapper[4685]: I1202 10:14:05.425941 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" event={"ID":"a8daea87-dd6c-4d94-bc95-69ee4a7813a9","Type":"ContainerDied","Data":"6c96476c27edda3bf7df36f60b502a95fcdb5fc78a96794dfe2520e24ed3e8e9"} Dec 02 10:14:05 crc kubenswrapper[4685]: I1202 10:14:05.922301 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e159366-bd40-43f3-9b3b-c818913c957a" path="/var/lib/kubelet/pods/4e159366-bd40-43f3-9b3b-c818913c957a/volumes" Dec 02 10:14:06 crc kubenswrapper[4685]: I1202 10:14:06.663312 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:14:06 crc kubenswrapper[4685]: I1202 10:14:06.788929 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-util\") pod \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " Dec 02 10:14:06 crc kubenswrapper[4685]: I1202 10:14:06.789031 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-bundle\") pod \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " Dec 02 10:14:06 crc kubenswrapper[4685]: I1202 10:14:06.789075 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmzzt\" (UniqueName: \"kubernetes.io/projected/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-kube-api-access-kmzzt\") pod \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\" (UID: \"a8daea87-dd6c-4d94-bc95-69ee4a7813a9\") " Dec 02 10:14:06 crc kubenswrapper[4685]: I1202 10:14:06.791224 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-bundle" (OuterVolumeSpecName: "bundle") pod "a8daea87-dd6c-4d94-bc95-69ee4a7813a9" (UID: "a8daea87-dd6c-4d94-bc95-69ee4a7813a9"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:14:06 crc kubenswrapper[4685]: I1202 10:14:06.797754 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-kube-api-access-kmzzt" (OuterVolumeSpecName: "kube-api-access-kmzzt") pod "a8daea87-dd6c-4d94-bc95-69ee4a7813a9" (UID: "a8daea87-dd6c-4d94-bc95-69ee4a7813a9"). InnerVolumeSpecName "kube-api-access-kmzzt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:14:06 crc kubenswrapper[4685]: I1202 10:14:06.890955 4685 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:14:06 crc kubenswrapper[4685]: I1202 10:14:06.890986 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmzzt\" (UniqueName: \"kubernetes.io/projected/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-kube-api-access-kmzzt\") on node \"crc\" DevicePath \"\"" Dec 02 10:14:07 crc kubenswrapper[4685]: I1202 10:14:07.262628 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-util" (OuterVolumeSpecName: "util") pod "a8daea87-dd6c-4d94-bc95-69ee4a7813a9" (UID: "a8daea87-dd6c-4d94-bc95-69ee4a7813a9"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:14:07 crc kubenswrapper[4685]: I1202 10:14:07.296105 4685 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a8daea87-dd6c-4d94-bc95-69ee4a7813a9-util\") on node \"crc\" DevicePath \"\"" Dec 02 10:14:07 crc kubenswrapper[4685]: I1202 10:14:07.443503 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" event={"ID":"a8daea87-dd6c-4d94-bc95-69ee4a7813a9","Type":"ContainerDied","Data":"3e078540d684cdc3bd76c24444213202f3a992bc04248e7b601f80f117e0dfe3"} Dec 02 10:14:07 crc kubenswrapper[4685]: I1202 10:14:07.443548 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e078540d684cdc3bd76c24444213202f3a992bc04248e7b601f80f117e0dfe3" Dec 02 10:14:07 crc kubenswrapper[4685]: I1202 10:14:07.443619 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc" Dec 02 10:14:12 crc kubenswrapper[4685]: I1202 10:14:12.147653 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:14:12 crc kubenswrapper[4685]: I1202 10:14:12.148847 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:14:12 crc kubenswrapper[4685]: I1202 10:14:12.148964 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:14:12 crc kubenswrapper[4685]: I1202 10:14:12.149527 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3a4bcb8208e258f079f3fb221ee480a662e75e3d7ac311aacc2db0117f88c73a"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:14:12 crc kubenswrapper[4685]: I1202 10:14:12.149691 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://3a4bcb8208e258f079f3fb221ee480a662e75e3d7ac311aacc2db0117f88c73a" gracePeriod=600 Dec 02 10:14:12 crc kubenswrapper[4685]: I1202 10:14:12.493324 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="3a4bcb8208e258f079f3fb221ee480a662e75e3d7ac311aacc2db0117f88c73a" exitCode=0 Dec 02 10:14:12 crc kubenswrapper[4685]: I1202 10:14:12.493673 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"3a4bcb8208e258f079f3fb221ee480a662e75e3d7ac311aacc2db0117f88c73a"} Dec 02 10:14:12 crc kubenswrapper[4685]: I1202 10:14:12.493708 4685 scope.go:117] "RemoveContainer" containerID="756a98cb3b9410eaff964c85a1d8208136ab965899a092f6f28f719bd0b0236c" Dec 02 10:14:13 crc kubenswrapper[4685]: I1202 10:14:13.502223 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"655120d7a1362181f84e921b333c967e14aabc7193e6f3de840afaf3bc97e69a"} Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.755625 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9"] Dec 02 10:14:17 crc kubenswrapper[4685]: E1202 10:14:17.756231 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8daea87-dd6c-4d94-bc95-69ee4a7813a9" containerName="pull" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.756245 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8daea87-dd6c-4d94-bc95-69ee4a7813a9" 
containerName="pull" Dec 02 10:14:17 crc kubenswrapper[4685]: E1202 10:14:17.756263 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8daea87-dd6c-4d94-bc95-69ee4a7813a9" containerName="util" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.756268 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8daea87-dd6c-4d94-bc95-69ee4a7813a9" containerName="util" Dec 02 10:14:17 crc kubenswrapper[4685]: E1202 10:14:17.756279 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8daea87-dd6c-4d94-bc95-69ee4a7813a9" containerName="extract" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.756284 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8daea87-dd6c-4d94-bc95-69ee4a7813a9" containerName="extract" Dec 02 10:14:17 crc kubenswrapper[4685]: E1202 10:14:17.756294 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e159366-bd40-43f3-9b3b-c818913c957a" containerName="console" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.756300 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e159366-bd40-43f3-9b3b-c818913c957a" containerName="console" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.756392 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e159366-bd40-43f3-9b3b-c818913c957a" containerName="console" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.756402 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8daea87-dd6c-4d94-bc95-69ee4a7813a9" containerName="extract" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.756790 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.766295 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.770988 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.771551 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-6htsw" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.777727 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.779540 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.832166 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9"] Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.933202 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njt52\" (UniqueName: \"kubernetes.io/projected/9edd243d-18c6-4345-b31d-a9825a3ad745-kube-api-access-njt52\") pod \"metallb-operator-controller-manager-685cd4986b-57sm9\" (UID: \"9edd243d-18c6-4345-b31d-a9825a3ad745\") " pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.933265 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/9edd243d-18c6-4345-b31d-a9825a3ad745-apiservice-cert\") pod \"metallb-operator-controller-manager-685cd4986b-57sm9\" (UID: \"9edd243d-18c6-4345-b31d-a9825a3ad745\") " pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:17 crc kubenswrapper[4685]: I1202 10:14:17.933347 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9edd243d-18c6-4345-b31d-a9825a3ad745-webhook-cert\") pod \"metallb-operator-controller-manager-685cd4986b-57sm9\" (UID: \"9edd243d-18c6-4345-b31d-a9825a3ad745\") " pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.034790 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9edd243d-18c6-4345-b31d-a9825a3ad745-webhook-cert\") pod \"metallb-operator-controller-manager-685cd4986b-57sm9\" (UID: \"9edd243d-18c6-4345-b31d-a9825a3ad745\") " pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.034862 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njt52\" (UniqueName: \"kubernetes.io/projected/9edd243d-18c6-4345-b31d-a9825a3ad745-kube-api-access-njt52\") pod \"metallb-operator-controller-manager-685cd4986b-57sm9\" (UID: \"9edd243d-18c6-4345-b31d-a9825a3ad745\") " pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.034890 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9edd243d-18c6-4345-b31d-a9825a3ad745-apiservice-cert\") pod \"metallb-operator-controller-manager-685cd4986b-57sm9\" (UID: \"9edd243d-18c6-4345-b31d-a9825a3ad745\") " pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.040397 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9edd243d-18c6-4345-b31d-a9825a3ad745-apiservice-cert\") pod \"metallb-operator-controller-manager-685cd4986b-57sm9\" (UID: \"9edd243d-18c6-4345-b31d-a9825a3ad745\") " pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.042216 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9edd243d-18c6-4345-b31d-a9825a3ad745-webhook-cert\") pod \"metallb-operator-controller-manager-685cd4986b-57sm9\" (UID: \"9edd243d-18c6-4345-b31d-a9825a3ad745\") " pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.056997 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njt52\" (UniqueName: \"kubernetes.io/projected/9edd243d-18c6-4345-b31d-a9825a3ad745-kube-api-access-njt52\") pod \"metallb-operator-controller-manager-685cd4986b-57sm9\" (UID: \"9edd243d-18c6-4345-b31d-a9825a3ad745\") " pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.077808 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.145974 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69"] Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.146619 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.150744 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.151111 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.151290 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-v9h9j" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.164531 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69"] Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.339777 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjs9d\" (UniqueName: \"kubernetes.io/projected/7883a47e-8804-4094-8e91-73f854896283-kube-api-access-xjs9d\") pod \"metallb-operator-webhook-server-6db9d48979-ljd69\" (UID: \"7883a47e-8804-4094-8e91-73f854896283\") " pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.340029 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7883a47e-8804-4094-8e91-73f854896283-webhook-cert\") pod \"metallb-operator-webhook-server-6db9d48979-ljd69\" (UID: \"7883a47e-8804-4094-8e91-73f854896283\") " pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.340069 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7883a47e-8804-4094-8e91-73f854896283-apiservice-cert\") pod \"metallb-operator-webhook-server-6db9d48979-ljd69\" (UID: \"7883a47e-8804-4094-8e91-73f854896283\") " pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.385744 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9"] Dec 02 10:14:18 crc kubenswrapper[4685]: W1202 10:14:18.398756 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9edd243d_18c6_4345_b31d_a9825a3ad745.slice/crio-3527fc019ac25f4f1dc688766e0c1d1b5d95c4516ae76bb6d2fe28f67138fd3c WatchSource:0}: Error finding container 3527fc019ac25f4f1dc688766e0c1d1b5d95c4516ae76bb6d2fe28f67138fd3c: Status 404 returned error can't find the container with id 3527fc019ac25f4f1dc688766e0c1d1b5d95c4516ae76bb6d2fe28f67138fd3c Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.440891 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/7883a47e-8804-4094-8e91-73f854896283-apiservice-cert\") pod \"metallb-operator-webhook-server-6db9d48979-ljd69\" (UID: \"7883a47e-8804-4094-8e91-73f854896283\") " pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.441001 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjs9d\" (UniqueName: \"kubernetes.io/projected/7883a47e-8804-4094-8e91-73f854896283-kube-api-access-xjs9d\") pod \"metallb-operator-webhook-server-6db9d48979-ljd69\" (UID: \"7883a47e-8804-4094-8e91-73f854896283\") " pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.441033 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7883a47e-8804-4094-8e91-73f854896283-webhook-cert\") pod \"metallb-operator-webhook-server-6db9d48979-ljd69\" (UID: \"7883a47e-8804-4094-8e91-73f854896283\") " pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.455511 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7883a47e-8804-4094-8e91-73f854896283-apiservice-cert\") pod \"metallb-operator-webhook-server-6db9d48979-ljd69\" (UID: \"7883a47e-8804-4094-8e91-73f854896283\") " pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.456103 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7883a47e-8804-4094-8e91-73f854896283-webhook-cert\") pod \"metallb-operator-webhook-server-6db9d48979-ljd69\" (UID: \"7883a47e-8804-4094-8e91-73f854896283\") " pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.462873 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjs9d\" (UniqueName: \"kubernetes.io/projected/7883a47e-8804-4094-8e91-73f854896283-kube-api-access-xjs9d\") pod \"metallb-operator-webhook-server-6db9d48979-ljd69\" (UID: \"7883a47e-8804-4094-8e91-73f854896283\") " pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.528839 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" event={"ID":"9edd243d-18c6-4345-b31d-a9825a3ad745","Type":"ContainerStarted","Data":"3527fc019ac25f4f1dc688766e0c1d1b5d95c4516ae76bb6d2fe28f67138fd3c"} Dec 02 10:14:18 crc kubenswrapper[4685]: I1202 10:14:18.761375 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:19 crc kubenswrapper[4685]: I1202 10:14:19.036585 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69"] Dec 02 10:14:19 crc kubenswrapper[4685]: W1202 10:14:19.051699 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7883a47e_8804_4094_8e91_73f854896283.slice/crio-bbdfee1fb8d7a2efb16bc64f4b51b006e6a9880be877da413966763e09b55759 WatchSource:0}: Error finding container bbdfee1fb8d7a2efb16bc64f4b51b006e6a9880be877da413966763e09b55759: Status 404 returned error can't find the container with id bbdfee1fb8d7a2efb16bc64f4b51b006e6a9880be877da413966763e09b55759 Dec 02 10:14:19 crc kubenswrapper[4685]: I1202 10:14:19.536758 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" event={"ID":"7883a47e-8804-4094-8e91-73f854896283","Type":"ContainerStarted","Data":"bbdfee1fb8d7a2efb16bc64f4b51b006e6a9880be877da413966763e09b55759"} Dec 02 10:14:25 crc kubenswrapper[4685]: I1202 10:14:25.579485 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" event={"ID":"9edd243d-18c6-4345-b31d-a9825a3ad745","Type":"ContainerStarted","Data":"2f86e5fa1dfa70eca8faa88211111b64dbf9516656fee22e41de00382e866b5c"} Dec 02 10:14:25 crc kubenswrapper[4685]: I1202 10:14:25.581230 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:25 crc kubenswrapper[4685]: I1202 10:14:25.583812 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" event={"ID":"7883a47e-8804-4094-8e91-73f854896283","Type":"ContainerStarted","Data":"f1f51626d8b912257b0f2604a964cf66a1598c4192bcbfe3cc5e599cd24b7a55"} Dec 02 10:14:25 crc kubenswrapper[4685]: I1202 10:14:25.584041 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:25 crc kubenswrapper[4685]: I1202 10:14:25.633838 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" podStartSLOduration=2.300263071 podStartE2EDuration="8.633822504s" podCreationTimestamp="2025-12-02 10:14:17 +0000 UTC" firstStartedPulling="2025-12-02 10:14:18.401132298 +0000 UTC m=+750.772906452" lastFinishedPulling="2025-12-02 10:14:24.734691731 +0000 UTC m=+757.106465885" observedRunningTime="2025-12-02 10:14:25.611592493 +0000 UTC m=+757.983366657" watchObservedRunningTime="2025-12-02 10:14:25.633822504 +0000 UTC m=+758.005596658" Dec 02 10:14:25 crc kubenswrapper[4685]: I1202 10:14:25.636772 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" podStartSLOduration=1.9399247960000001 podStartE2EDuration="7.636756455s" podCreationTimestamp="2025-12-02 10:14:18 +0000 UTC" firstStartedPulling="2025-12-02 10:14:19.054702354 +0000 UTC m=+751.426476508" lastFinishedPulling="2025-12-02 10:14:24.751534013 +0000 UTC m=+757.123308167" observedRunningTime="2025-12-02 10:14:25.632862767 +0000 UTC m=+758.004636931" watchObservedRunningTime="2025-12-02 10:14:25.636756455 +0000 UTC 
m=+758.008530609" Dec 02 10:14:38 crc kubenswrapper[4685]: I1202 10:14:38.766991 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6db9d48979-ljd69" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.080212 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-685cd4986b-57sm9" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.823409 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk"] Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.824346 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.834827 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-jbwrt"] Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.837584 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.842037 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.842842 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-r7fld" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.843866 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.845929 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.847197 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk"] Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.865184 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80b410fd-737d-4aed-8095-8ea5386c9cea-metrics-certs\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.865229 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-frr-conf\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.865270 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8vdg\" (UniqueName: \"kubernetes.io/projected/aa6b58de-e46c-4019-9a27-2991ad81429f-kube-api-access-f8vdg\") pod \"frr-k8s-webhook-server-7fcb986d4-gdnpk\" (UID: \"aa6b58de-e46c-4019-9a27-2991ad81429f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.865306 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-frr-sockets\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " 
pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.865343 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/80b410fd-737d-4aed-8095-8ea5386c9cea-frr-startup\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.865474 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-reloader\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.865594 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkcq8\" (UniqueName: \"kubernetes.io/projected/80b410fd-737d-4aed-8095-8ea5386c9cea-kube-api-access-pkcq8\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.865634 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aa6b58de-e46c-4019-9a27-2991ad81429f-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-gdnpk\" (UID: \"aa6b58de-e46c-4019-9a27-2991ad81429f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.865665 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-metrics\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.929684 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-2mprm"] Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.930691 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-2mprm" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.938095 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.938143 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.938413 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.943852 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-z8v2s"] Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.944888 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.945693 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-n672g" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.946299 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.964546 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-z8v2s"] Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.969982 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkcq8\" (UniqueName: \"kubernetes.io/projected/80b410fd-737d-4aed-8095-8ea5386c9cea-kube-api-access-pkcq8\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970042 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aa6b58de-e46c-4019-9a27-2991ad81429f-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-gdnpk\" (UID: \"aa6b58de-e46c-4019-9a27-2991ad81429f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970079 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-metrics\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970146 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66ngh\" (UniqueName: \"kubernetes.io/projected/acd555b8-75ea-48d0-a942-c8762e15f37c-kube-api-access-66ngh\") pod \"controller-f8648f98b-z8v2s\" (UID: \"acd555b8-75ea-48d0-a942-c8762e15f37c\") " pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970171 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80b410fd-737d-4aed-8095-8ea5386c9cea-metrics-certs\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970194 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-frr-conf\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970229 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-metallb-excludel2\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970260 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8vdg\" (UniqueName: \"kubernetes.io/projected/aa6b58de-e46c-4019-9a27-2991ad81429f-kube-api-access-f8vdg\") pod \"frr-k8s-webhook-server-7fcb986d4-gdnpk\" (UID: 
\"aa6b58de-e46c-4019-9a27-2991ad81429f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970298 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-frr-sockets\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970332 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4cw7\" (UniqueName: \"kubernetes.io/projected/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-kube-api-access-s4cw7\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970352 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/80b410fd-737d-4aed-8095-8ea5386c9cea-frr-startup\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970379 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/acd555b8-75ea-48d0-a942-c8762e15f37c-cert\") pod \"controller-f8648f98b-z8v2s\" (UID: \"acd555b8-75ea-48d0-a942-c8762e15f37c\") " pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970399 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-metrics-certs\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970421 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/acd555b8-75ea-48d0-a942-c8762e15f37c-metrics-certs\") pod \"controller-f8648f98b-z8v2s\" (UID: \"acd555b8-75ea-48d0-a942-c8762e15f37c\") " pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970462 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-reloader\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.970534 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-memberlist\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:58 crc kubenswrapper[4685]: E1202 10:14:58.970963 4685 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 02 10:14:58 crc kubenswrapper[4685]: E1202 10:14:58.971016 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa6b58de-e46c-4019-9a27-2991ad81429f-cert podName:aa6b58de-e46c-4019-9a27-2991ad81429f nodeName:}" 
failed. No retries permitted until 2025-12-02 10:14:59.470999064 +0000 UTC m=+791.842773218 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/aa6b58de-e46c-4019-9a27-2991ad81429f-cert") pod "frr-k8s-webhook-server-7fcb986d4-gdnpk" (UID: "aa6b58de-e46c-4019-9a27-2991ad81429f") : secret "frr-k8s-webhook-server-cert" not found Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.971587 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-metrics\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.972762 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/80b410fd-737d-4aed-8095-8ea5386c9cea-frr-startup\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: E1202 10:14:58.972893 4685 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 02 10:14:58 crc kubenswrapper[4685]: E1202 10:14:58.972947 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80b410fd-737d-4aed-8095-8ea5386c9cea-metrics-certs podName:80b410fd-737d-4aed-8095-8ea5386c9cea nodeName:}" failed. No retries permitted until 2025-12-02 10:14:59.472930577 +0000 UTC m=+791.844704731 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/80b410fd-737d-4aed-8095-8ea5386c9cea-metrics-certs") pod "frr-k8s-jbwrt" (UID: "80b410fd-737d-4aed-8095-8ea5386c9cea") : secret "frr-k8s-certs-secret" not found Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.973163 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-frr-conf\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.973685 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-reloader\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:58 crc kubenswrapper[4685]: I1202 10:14:58.981254 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/80b410fd-737d-4aed-8095-8ea5386c9cea-frr-sockets\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.016363 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkcq8\" (UniqueName: \"kubernetes.io/projected/80b410fd-737d-4aed-8095-8ea5386c9cea-kube-api-access-pkcq8\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.045264 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8vdg\" (UniqueName: 
\"kubernetes.io/projected/aa6b58de-e46c-4019-9a27-2991ad81429f-kube-api-access-f8vdg\") pod \"frr-k8s-webhook-server-7fcb986d4-gdnpk\" (UID: \"aa6b58de-e46c-4019-9a27-2991ad81429f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.071886 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/acd555b8-75ea-48d0-a942-c8762e15f37c-cert\") pod \"controller-f8648f98b-z8v2s\" (UID: \"acd555b8-75ea-48d0-a942-c8762e15f37c\") " pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.071932 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-metrics-certs\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.071951 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/acd555b8-75ea-48d0-a942-c8762e15f37c-metrics-certs\") pod \"controller-f8648f98b-z8v2s\" (UID: \"acd555b8-75ea-48d0-a942-c8762e15f37c\") " pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.071992 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-memberlist\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.072039 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66ngh\" (UniqueName: \"kubernetes.io/projected/acd555b8-75ea-48d0-a942-c8762e15f37c-kube-api-access-66ngh\") pod \"controller-f8648f98b-z8v2s\" (UID: \"acd555b8-75ea-48d0-a942-c8762e15f37c\") " pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.072073 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-metallb-excludel2\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.072099 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4cw7\" (UniqueName: \"kubernetes.io/projected/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-kube-api-access-s4cw7\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:59 crc kubenswrapper[4685]: E1202 10:14:59.072823 4685 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 02 10:14:59 crc kubenswrapper[4685]: E1202 10:14:59.072889 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/acd555b8-75ea-48d0-a942-c8762e15f37c-metrics-certs podName:acd555b8-75ea-48d0-a942-c8762e15f37c nodeName:}" failed. No retries permitted until 2025-12-02 10:14:59.572869003 +0000 UTC m=+791.944643157 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/acd555b8-75ea-48d0-a942-c8762e15f37c-metrics-certs") pod "controller-f8648f98b-z8v2s" (UID: "acd555b8-75ea-48d0-a942-c8762e15f37c") : secret "controller-certs-secret" not found Dec 02 10:14:59 crc kubenswrapper[4685]: E1202 10:14:59.073058 4685 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 02 10:14:59 crc kubenswrapper[4685]: E1202 10:14:59.073094 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-memberlist podName:e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7 nodeName:}" failed. No retries permitted until 2025-12-02 10:14:59.573084829 +0000 UTC m=+791.944859063 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-memberlist") pod "speaker-2mprm" (UID: "e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7") : secret "metallb-memberlist" not found Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.073417 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-metallb-excludel2\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.077790 4685 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.086118 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/acd555b8-75ea-48d0-a942-c8762e15f37c-cert\") pod \"controller-f8648f98b-z8v2s\" (UID: \"acd555b8-75ea-48d0-a942-c8762e15f37c\") " pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.094103 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-metrics-certs\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.097202 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66ngh\" (UniqueName: \"kubernetes.io/projected/acd555b8-75ea-48d0-a942-c8762e15f37c-kube-api-access-66ngh\") pod \"controller-f8648f98b-z8v2s\" (UID: \"acd555b8-75ea-48d0-a942-c8762e15f37c\") " pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.105131 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4cw7\" (UniqueName: \"kubernetes.io/projected/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-kube-api-access-s4cw7\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.476805 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aa6b58de-e46c-4019-9a27-2991ad81429f-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-gdnpk\" (UID: \"aa6b58de-e46c-4019-9a27-2991ad81429f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.476861 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80b410fd-737d-4aed-8095-8ea5386c9cea-metrics-certs\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.479694 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80b410fd-737d-4aed-8095-8ea5386c9cea-metrics-certs\") pod \"frr-k8s-jbwrt\" (UID: \"80b410fd-737d-4aed-8095-8ea5386c9cea\") " pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.480906 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/aa6b58de-e46c-4019-9a27-2991ad81429f-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-gdnpk\" (UID: \"aa6b58de-e46c-4019-9a27-2991ad81429f\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.578091 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-memberlist\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:14:59 crc kubenswrapper[4685]: E1202 10:14:59.578234 4685 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 02 10:14:59 crc kubenswrapper[4685]: E1202 10:14:59.578525 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-memberlist podName:e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7 nodeName:}" failed. No retries permitted until 2025-12-02 10:15:00.578508429 +0000 UTC m=+792.950282573 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-memberlist") pod "speaker-2mprm" (UID: "e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7") : secret "metallb-memberlist" not found Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.578452 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/acd555b8-75ea-48d0-a942-c8762e15f37c-metrics-certs\") pod \"controller-f8648f98b-z8v2s\" (UID: \"acd555b8-75ea-48d0-a942-c8762e15f37c\") " pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.581726 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/acd555b8-75ea-48d0-a942-c8762e15f37c-metrics-certs\") pod \"controller-f8648f98b-z8v2s\" (UID: \"acd555b8-75ea-48d0-a942-c8762e15f37c\") " pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.744062 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.756646 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:14:59 crc kubenswrapper[4685]: I1202 10:14:59.875076 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.050408 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk"] Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.146259 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-z8v2s"] Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.160441 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf"] Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.161146 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.162726 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.162927 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.175678 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf"] Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.209816 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/87325cd0-927f-4761-a3db-1640fe3060c7-secret-volume\") pod \"collect-profiles-29411175-k45vf\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.209872 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87325cd0-927f-4761-a3db-1640fe3060c7-config-volume\") pod \"collect-profiles-29411175-k45vf\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.209965 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87hj4\" (UniqueName: \"kubernetes.io/projected/87325cd0-927f-4761-a3db-1640fe3060c7-kube-api-access-87hj4\") pod \"collect-profiles-29411175-k45vf\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: W1202 10:15:00.218262 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podacd555b8_75ea_48d0_a942_c8762e15f37c.slice/crio-2fa54c5824d9bcbc21498f5e706e29f9af5bc4db8c549445fce0a1192380a2e3 WatchSource:0}: Error finding container 2fa54c5824d9bcbc21498f5e706e29f9af5bc4db8c549445fce0a1192380a2e3: Status 404 returned error can't find the container with id 2fa54c5824d9bcbc21498f5e706e29f9af5bc4db8c549445fce0a1192380a2e3 Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.311184 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/87325cd0-927f-4761-a3db-1640fe3060c7-secret-volume\") pod 
\"collect-profiles-29411175-k45vf\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.311239 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87325cd0-927f-4761-a3db-1640fe3060c7-config-volume\") pod \"collect-profiles-29411175-k45vf\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.311315 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87hj4\" (UniqueName: \"kubernetes.io/projected/87325cd0-927f-4761-a3db-1640fe3060c7-kube-api-access-87hj4\") pod \"collect-profiles-29411175-k45vf\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.312253 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87325cd0-927f-4761-a3db-1640fe3060c7-config-volume\") pod \"collect-profiles-29411175-k45vf\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.317253 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/87325cd0-927f-4761-a3db-1640fe3060c7-secret-volume\") pod \"collect-profiles-29411175-k45vf\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.338450 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87hj4\" (UniqueName: \"kubernetes.io/projected/87325cd0-927f-4761-a3db-1640fe3060c7-kube-api-access-87hj4\") pod \"collect-profiles-29411175-k45vf\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.477986 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.615253 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-memberlist\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.622116 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7-memberlist\") pod \"speaker-2mprm\" (UID: \"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7\") " pod="metallb-system/speaker-2mprm" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.743411 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-2mprm" Dec 02 10:15:00 crc kubenswrapper[4685]: W1202 10:15:00.760774 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6bcf4ed_6fbe_46b2_880c_e70bddd3d6f7.slice/crio-6fe09b0df124cd59aa3cd4be325640d79bba55287b73e7f9d04752c106543165 WatchSource:0}: Error finding container 6fe09b0df124cd59aa3cd4be325640d79bba55287b73e7f9d04752c106543165: Status 404 returned error can't find the container with id 6fe09b0df124cd59aa3cd4be325640d79bba55287b73e7f9d04752c106543165 Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.785683 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-z8v2s" event={"ID":"acd555b8-75ea-48d0-a942-c8762e15f37c","Type":"ContainerStarted","Data":"26d421abe31b2254c6c8b09fedb50f8fcef7d0b7baf0c0e15e02490497b22173"} Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.785763 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.785780 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-z8v2s" event={"ID":"acd555b8-75ea-48d0-a942-c8762e15f37c","Type":"ContainerStarted","Data":"74dd9dad8539b41ca4dfc817c3bdae94b3ce396c900ea80ff538f1c271406104"} Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.785793 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-z8v2s" event={"ID":"acd555b8-75ea-48d0-a942-c8762e15f37c","Type":"ContainerStarted","Data":"2fa54c5824d9bcbc21498f5e706e29f9af5bc4db8c549445fce0a1192380a2e3"} Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.787387 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jbwrt" event={"ID":"80b410fd-737d-4aed-8095-8ea5386c9cea","Type":"ContainerStarted","Data":"1a038aa509a563c71223d0bcddead3de3d6deb830ae259e670f1afda22e34ec8"} Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.789057 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" event={"ID":"aa6b58de-e46c-4019-9a27-2991ad81429f","Type":"ContainerStarted","Data":"d7f2361596fa4ea395d4cf04267cc5aaed3ae64185dc686637c98de46f9f8cc7"} Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.790910 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-2mprm" event={"ID":"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7","Type":"ContainerStarted","Data":"6fe09b0df124cd59aa3cd4be325640d79bba55287b73e7f9d04752c106543165"} Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.806279 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-z8v2s" podStartSLOduration=2.806263899 podStartE2EDuration="2.806263899s" podCreationTimestamp="2025-12-02 10:14:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:15:00.802729033 +0000 UTC m=+793.174503187" watchObservedRunningTime="2025-12-02 10:15:00.806263899 +0000 UTC m=+793.178038053" Dec 02 10:15:00 crc kubenswrapper[4685]: I1202 10:15:00.912397 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf"] Dec 02 10:15:00 crc kubenswrapper[4685]: W1202 10:15:00.939906 4685 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87325cd0_927f_4761_a3db_1640fe3060c7.slice/crio-534db6dfe8a01ac0caebec5f50d4b4b11c7e37e7428dcded2935207fa58efc66 WatchSource:0}: Error finding container 534db6dfe8a01ac0caebec5f50d4b4b11c7e37e7428dcded2935207fa58efc66: Status 404 returned error can't find the container with id 534db6dfe8a01ac0caebec5f50d4b4b11c7e37e7428dcded2935207fa58efc66 Dec 02 10:15:01 crc kubenswrapper[4685]: I1202 10:15:01.805519 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-2mprm" event={"ID":"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7","Type":"ContainerStarted","Data":"9232262779fb1c4236db7463c827005fcf8ff6808b8a515183bf2782578411f1"} Dec 02 10:15:01 crc kubenswrapper[4685]: I1202 10:15:01.805780 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-2mprm" event={"ID":"e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7","Type":"ContainerStarted","Data":"f3c7ad3d671b979e1eb50e8d8cce3a4db438e63e0bd60e5f3d568b3e5384ad00"} Dec 02 10:15:01 crc kubenswrapper[4685]: I1202 10:15:01.805814 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-2mprm" Dec 02 10:15:01 crc kubenswrapper[4685]: I1202 10:15:01.807910 4685 generic.go:334] "Generic (PLEG): container finished" podID="87325cd0-927f-4761-a3db-1640fe3060c7" containerID="1973465ea9fccfd5a052956e335d2663c0b4c615a89d8330a849d73f4e8033a3" exitCode=0 Dec 02 10:15:01 crc kubenswrapper[4685]: I1202 10:15:01.807993 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" event={"ID":"87325cd0-927f-4761-a3db-1640fe3060c7","Type":"ContainerDied","Data":"1973465ea9fccfd5a052956e335d2663c0b4c615a89d8330a849d73f4e8033a3"} Dec 02 10:15:01 crc kubenswrapper[4685]: I1202 10:15:01.808018 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" event={"ID":"87325cd0-927f-4761-a3db-1640fe3060c7","Type":"ContainerStarted","Data":"534db6dfe8a01ac0caebec5f50d4b4b11c7e37e7428dcded2935207fa58efc66"} Dec 02 10:15:01 crc kubenswrapper[4685]: I1202 10:15:01.834190 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-2mprm" podStartSLOduration=3.834171318 podStartE2EDuration="3.834171318s" podCreationTimestamp="2025-12-02 10:14:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:15:01.829588832 +0000 UTC m=+794.201362986" watchObservedRunningTime="2025-12-02 10:15:01.834171318 +0000 UTC m=+794.205945472" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.074779 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xphfz"] Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.076553 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.106133 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xphfz"] Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.157926 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtcxg\" (UniqueName: \"kubernetes.io/projected/7cf41ade-7601-4b2a-8705-76d9f1e50212-kube-api-access-rtcxg\") pod \"community-operators-xphfz\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.158034 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-utilities\") pod \"community-operators-xphfz\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.158112 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-catalog-content\") pod \"community-operators-xphfz\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.259065 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtcxg\" (UniqueName: \"kubernetes.io/projected/7cf41ade-7601-4b2a-8705-76d9f1e50212-kube-api-access-rtcxg\") pod \"community-operators-xphfz\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.259126 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-utilities\") pod \"community-operators-xphfz\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.259162 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-catalog-content\") pod \"community-operators-xphfz\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.259613 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-catalog-content\") pod \"community-operators-xphfz\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.260145 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-utilities\") pod \"community-operators-xphfz\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.312593 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rtcxg\" (UniqueName: \"kubernetes.io/projected/7cf41ade-7601-4b2a-8705-76d9f1e50212-kube-api-access-rtcxg\") pod \"community-operators-xphfz\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.395205 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.420002 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.463763 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87325cd0-927f-4761-a3db-1640fe3060c7-config-volume\") pod \"87325cd0-927f-4761-a3db-1640fe3060c7\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.463900 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/87325cd0-927f-4761-a3db-1640fe3060c7-secret-volume\") pod \"87325cd0-927f-4761-a3db-1640fe3060c7\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.463939 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87hj4\" (UniqueName: \"kubernetes.io/projected/87325cd0-927f-4761-a3db-1640fe3060c7-kube-api-access-87hj4\") pod \"87325cd0-927f-4761-a3db-1640fe3060c7\" (UID: \"87325cd0-927f-4761-a3db-1640fe3060c7\") " Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.464628 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87325cd0-927f-4761-a3db-1640fe3060c7-config-volume" (OuterVolumeSpecName: "config-volume") pod "87325cd0-927f-4761-a3db-1640fe3060c7" (UID: "87325cd0-927f-4761-a3db-1640fe3060c7"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.467784 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87325cd0-927f-4761-a3db-1640fe3060c7-kube-api-access-87hj4" (OuterVolumeSpecName: "kube-api-access-87hj4") pod "87325cd0-927f-4761-a3db-1640fe3060c7" (UID: "87325cd0-927f-4761-a3db-1640fe3060c7"). InnerVolumeSpecName "kube-api-access-87hj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.468202 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87325cd0-927f-4761-a3db-1640fe3060c7-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "87325cd0-927f-4761-a3db-1640fe3060c7" (UID: "87325cd0-927f-4761-a3db-1640fe3060c7"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.565477 4685 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/87325cd0-927f-4761-a3db-1640fe3060c7-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.565520 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87hj4\" (UniqueName: \"kubernetes.io/projected/87325cd0-927f-4761-a3db-1640fe3060c7-kube-api-access-87hj4\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.565534 4685 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87325cd0-927f-4761-a3db-1640fe3060c7-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.840375 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" event={"ID":"87325cd0-927f-4761-a3db-1640fe3060c7","Type":"ContainerDied","Data":"534db6dfe8a01ac0caebec5f50d4b4b11c7e37e7428dcded2935207fa58efc66"} Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.840407 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="534db6dfe8a01ac0caebec5f50d4b4b11c7e37e7428dcded2935207fa58efc66" Dec 02 10:15:03 crc kubenswrapper[4685]: I1202 10:15:03.840725 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf" Dec 02 10:15:04 crc kubenswrapper[4685]: I1202 10:15:04.044504 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xphfz"] Dec 02 10:15:04 crc kubenswrapper[4685]: I1202 10:15:04.869260 4685 generic.go:334] "Generic (PLEG): container finished" podID="7cf41ade-7601-4b2a-8705-76d9f1e50212" containerID="0b273e58e81a7959aee7f9a88b706752b2d6283ba2a08368b9a25eb2bcafe6da" exitCode=0 Dec 02 10:15:04 crc kubenswrapper[4685]: I1202 10:15:04.869346 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xphfz" event={"ID":"7cf41ade-7601-4b2a-8705-76d9f1e50212","Type":"ContainerDied","Data":"0b273e58e81a7959aee7f9a88b706752b2d6283ba2a08368b9a25eb2bcafe6da"} Dec 02 10:15:04 crc kubenswrapper[4685]: I1202 10:15:04.869624 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xphfz" event={"ID":"7cf41ade-7601-4b2a-8705-76d9f1e50212","Type":"ContainerStarted","Data":"10e14c762a196b10a7da203324b9c6b8f2f6d364df6f2349eae823f36d148cc4"} Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.481666 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lxf9v"] Dec 02 10:15:10 crc kubenswrapper[4685]: E1202 10:15:10.482458 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87325cd0-927f-4761-a3db-1640fe3060c7" containerName="collect-profiles" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.482472 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="87325cd0-927f-4761-a3db-1640fe3060c7" containerName="collect-profiles" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.482629 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="87325cd0-927f-4761-a3db-1640fe3060c7" containerName="collect-profiles" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.483490 
4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.487148 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lxf9v"] Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.577705 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-catalog-content\") pod \"redhat-marketplace-lxf9v\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.577762 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-utilities\") pod \"redhat-marketplace-lxf9v\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.577792 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxdgn\" (UniqueName: \"kubernetes.io/projected/850a45a9-49fe-474d-bbea-045b9a08dee4-kube-api-access-jxdgn\") pod \"redhat-marketplace-lxf9v\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.678895 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-catalog-content\") pod \"redhat-marketplace-lxf9v\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.678941 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-utilities\") pod \"redhat-marketplace-lxf9v\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.678973 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxdgn\" (UniqueName: \"kubernetes.io/projected/850a45a9-49fe-474d-bbea-045b9a08dee4-kube-api-access-jxdgn\") pod \"redhat-marketplace-lxf9v\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.679773 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-catalog-content\") pod \"redhat-marketplace-lxf9v\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.680032 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-utilities\") pod \"redhat-marketplace-lxf9v\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.703740 
4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxdgn\" (UniqueName: \"kubernetes.io/projected/850a45a9-49fe-474d-bbea-045b9a08dee4-kube-api-access-jxdgn\") pod \"redhat-marketplace-lxf9v\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.748430 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-2mprm" Dec 02 10:15:10 crc kubenswrapper[4685]: I1202 10:15:10.804198 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:11 crc kubenswrapper[4685]: I1202 10:15:11.745406 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lxf9v"] Dec 02 10:15:11 crc kubenswrapper[4685]: I1202 10:15:11.929421 4685 generic.go:334] "Generic (PLEG): container finished" podID="80b410fd-737d-4aed-8095-8ea5386c9cea" containerID="81e9873bf1712df25a0d1d1abbf27ce280a7b04cf65f6f3e01d5ca804be9c43e" exitCode=0 Dec 02 10:15:11 crc kubenswrapper[4685]: I1202 10:15:11.929480 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jbwrt" event={"ID":"80b410fd-737d-4aed-8095-8ea5386c9cea","Type":"ContainerDied","Data":"81e9873bf1712df25a0d1d1abbf27ce280a7b04cf65f6f3e01d5ca804be9c43e"} Dec 02 10:15:11 crc kubenswrapper[4685]: I1202 10:15:11.932213 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" event={"ID":"aa6b58de-e46c-4019-9a27-2991ad81429f","Type":"ContainerStarted","Data":"bbdfe2d125032a20963098895ce62f28648de051f569bd6bc37fb472fa67f3bb"} Dec 02 10:15:11 crc kubenswrapper[4685]: I1202 10:15:11.932424 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:15:11 crc kubenswrapper[4685]: I1202 10:15:11.937944 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxf9v" event={"ID":"850a45a9-49fe-474d-bbea-045b9a08dee4","Type":"ContainerStarted","Data":"52428c112686283bdb1cbefd651658e87774ff29fdb3b061832debf2362bc333"} Dec 02 10:15:11 crc kubenswrapper[4685]: I1202 10:15:11.940883 4685 generic.go:334] "Generic (PLEG): container finished" podID="7cf41ade-7601-4b2a-8705-76d9f1e50212" containerID="bf28e1fc2a727be16bf3200e5287f438690a237f9de9dfdf01c7a20f7d4dd6fa" exitCode=0 Dec 02 10:15:11 crc kubenswrapper[4685]: I1202 10:15:11.940934 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xphfz" event={"ID":"7cf41ade-7601-4b2a-8705-76d9f1e50212","Type":"ContainerDied","Data":"bf28e1fc2a727be16bf3200e5287f438690a237f9de9dfdf01c7a20f7d4dd6fa"} Dec 02 10:15:11 crc kubenswrapper[4685]: I1202 10:15:11.996541 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" podStartSLOduration=2.595923949 podStartE2EDuration="13.996521946s" podCreationTimestamp="2025-12-02 10:14:58 +0000 UTC" firstStartedPulling="2025-12-02 10:15:00.095503586 +0000 UTC m=+792.467277740" lastFinishedPulling="2025-12-02 10:15:11.496101583 +0000 UTC m=+803.867875737" observedRunningTime="2025-12-02 10:15:11.993433661 +0000 UTC m=+804.365207825" watchObservedRunningTime="2025-12-02 10:15:11.996521946 +0000 UTC m=+804.368296100" Dec 02 10:15:12 crc kubenswrapper[4685]: I1202 10:15:12.947133 
4685 generic.go:334] "Generic (PLEG): container finished" podID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerID="4dc07f837c97e7edaa2f7bc242432ac7cb777a4b7c2578066f86b9b5e15ccc9e" exitCode=0 Dec 02 10:15:12 crc kubenswrapper[4685]: I1202 10:15:12.947238 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxf9v" event={"ID":"850a45a9-49fe-474d-bbea-045b9a08dee4","Type":"ContainerDied","Data":"4dc07f837c97e7edaa2f7bc242432ac7cb777a4b7c2578066f86b9b5e15ccc9e"} Dec 02 10:15:12 crc kubenswrapper[4685]: I1202 10:15:12.949978 4685 generic.go:334] "Generic (PLEG): container finished" podID="80b410fd-737d-4aed-8095-8ea5386c9cea" containerID="e1a556c76075fec480538708b93ed5a4dc88db5de81c8f31e3a4f4d8a971f3c7" exitCode=0 Dec 02 10:15:12 crc kubenswrapper[4685]: I1202 10:15:12.950516 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jbwrt" event={"ID":"80b410fd-737d-4aed-8095-8ea5386c9cea","Type":"ContainerDied","Data":"e1a556c76075fec480538708b93ed5a4dc88db5de81c8f31e3a4f4d8a971f3c7"} Dec 02 10:15:13 crc kubenswrapper[4685]: I1202 10:15:13.958367 4685 generic.go:334] "Generic (PLEG): container finished" podID="80b410fd-737d-4aed-8095-8ea5386c9cea" containerID="dad6ba6851cefaaec76db4cb00ba9d70733178193314c7963a80b39e599d7691" exitCode=0 Dec 02 10:15:13 crc kubenswrapper[4685]: I1202 10:15:13.958467 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jbwrt" event={"ID":"80b410fd-737d-4aed-8095-8ea5386c9cea","Type":"ContainerDied","Data":"dad6ba6851cefaaec76db4cb00ba9d70733178193314c7963a80b39e599d7691"} Dec 02 10:15:13 crc kubenswrapper[4685]: I1202 10:15:13.961384 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xphfz" event={"ID":"7cf41ade-7601-4b2a-8705-76d9f1e50212","Type":"ContainerStarted","Data":"06a7179d6cd5c6fa5eccd523be501a5c182f2c707f3314b77910bb472ab6f9fc"} Dec 02 10:15:14 crc kubenswrapper[4685]: I1202 10:15:14.009810 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xphfz" podStartSLOduration=2.803686866 podStartE2EDuration="11.009751242s" podCreationTimestamp="2025-12-02 10:15:03 +0000 UTC" firstStartedPulling="2025-12-02 10:15:04.875327164 +0000 UTC m=+797.247101318" lastFinishedPulling="2025-12-02 10:15:13.08139153 +0000 UTC m=+805.453165694" observedRunningTime="2025-12-02 10:15:14.006948085 +0000 UTC m=+806.378722239" watchObservedRunningTime="2025-12-02 10:15:14.009751242 +0000 UTC m=+806.381525396" Dec 02 10:15:15 crc kubenswrapper[4685]: I1202 10:15:15.988902 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jbwrt" event={"ID":"80b410fd-737d-4aed-8095-8ea5386c9cea","Type":"ContainerStarted","Data":"202ad9ec0cb3d045feb335b36a6c64f76511177dcc7868db4917eb1130826c77"} Dec 02 10:15:15 crc kubenswrapper[4685]: I1202 10:15:15.989230 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jbwrt" event={"ID":"80b410fd-737d-4aed-8095-8ea5386c9cea","Type":"ContainerStarted","Data":"3bee1e89a290a5f8ad3c9e0bd9d89263c3dc739ef915d4d808c2ce7e7bd457aa"} Dec 02 10:15:15 crc kubenswrapper[4685]: I1202 10:15:15.989247 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jbwrt" event={"ID":"80b410fd-737d-4aed-8095-8ea5386c9cea","Type":"ContainerStarted","Data":"9f0186a671664fffc9c3e8127effe3aff10663108c56d01747f8b2c32c289393"} Dec 02 10:15:15 crc kubenswrapper[4685]: I1202 
10:15:15.989259 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jbwrt" event={"ID":"80b410fd-737d-4aed-8095-8ea5386c9cea","Type":"ContainerStarted","Data":"cbeedcf8e59abe9d4dadd1555a8a98f9bab233ed2b06cc790f49b28d30b089ad"} Dec 02 10:15:15 crc kubenswrapper[4685]: I1202 10:15:15.989270 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jbwrt" event={"ID":"80b410fd-737d-4aed-8095-8ea5386c9cea","Type":"ContainerStarted","Data":"a35261ba7a8a21787ee9552e627e7415179817cf80f25020edc3c6e948038a85"} Dec 02 10:15:15 crc kubenswrapper[4685]: I1202 10:15:15.991267 4685 generic.go:334] "Generic (PLEG): container finished" podID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerID="fcba25e141ec1a7ace8a02b49595baeda9577f63c20ac5055de5a98edaca23ae" exitCode=0 Dec 02 10:15:15 crc kubenswrapper[4685]: I1202 10:15:15.991324 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxf9v" event={"ID":"850a45a9-49fe-474d-bbea-045b9a08dee4","Type":"ContainerDied","Data":"fcba25e141ec1a7ace8a02b49595baeda9577f63c20ac5055de5a98edaca23ae"} Dec 02 10:15:17 crc kubenswrapper[4685]: I1202 10:15:17.001467 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-jbwrt" event={"ID":"80b410fd-737d-4aed-8095-8ea5386c9cea","Type":"ContainerStarted","Data":"3a828033b071ed1c88dadff86497857ac88d8ed5af53e1f0de2112e993bc50c4"} Dec 02 10:15:17 crc kubenswrapper[4685]: I1202 10:15:17.002430 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:15:17 crc kubenswrapper[4685]: I1202 10:15:17.027951 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-jbwrt" podStartSLOduration=7.998416349 podStartE2EDuration="19.027935347s" podCreationTimestamp="2025-12-02 10:14:58 +0000 UTC" firstStartedPulling="2025-12-02 10:15:00.4383714 +0000 UTC m=+792.810145594" lastFinishedPulling="2025-12-02 10:15:11.467890438 +0000 UTC m=+803.839664592" observedRunningTime="2025-12-02 10:15:17.021302924 +0000 UTC m=+809.393077088" watchObservedRunningTime="2025-12-02 10:15:17.027935347 +0000 UTC m=+809.399709501" Dec 02 10:15:17 crc kubenswrapper[4685]: I1202 10:15:17.855051 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-8mbzs"] Dec 02 10:15:17 crc kubenswrapper[4685]: I1202 10:15:17.856196 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-8mbzs" Dec 02 10:15:17 crc kubenswrapper[4685]: I1202 10:15:17.859389 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 02 10:15:17 crc kubenswrapper[4685]: I1202 10:15:17.859739 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-4tcsv" Dec 02 10:15:17 crc kubenswrapper[4685]: I1202 10:15:17.863149 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 02 10:15:17 crc kubenswrapper[4685]: I1202 10:15:17.871244 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-8mbzs"] Dec 02 10:15:17 crc kubenswrapper[4685]: I1202 10:15:17.980714 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zn4n\" (UniqueName: \"kubernetes.io/projected/16f31204-e29f-41f1-b560-814671c5967a-kube-api-access-2zn4n\") pod \"openstack-operator-index-8mbzs\" (UID: \"16f31204-e29f-41f1-b560-814671c5967a\") " pod="openstack-operators/openstack-operator-index-8mbzs" Dec 02 10:15:18 crc kubenswrapper[4685]: I1202 10:15:18.012685 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxf9v" event={"ID":"850a45a9-49fe-474d-bbea-045b9a08dee4","Type":"ContainerStarted","Data":"4f12fbc7596844b9a7c3f1d27e2c3551aca52a89891969adf960c001786b215f"} Dec 02 10:15:18 crc kubenswrapper[4685]: I1202 10:15:18.034009 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lxf9v" podStartSLOduration=3.499324825 podStartE2EDuration="8.033989415s" podCreationTimestamp="2025-12-02 10:15:10 +0000 UTC" firstStartedPulling="2025-12-02 10:15:12.949280509 +0000 UTC m=+805.321054663" lastFinishedPulling="2025-12-02 10:15:17.483945089 +0000 UTC m=+809.855719253" observedRunningTime="2025-12-02 10:15:18.030464898 +0000 UTC m=+810.402239062" watchObservedRunningTime="2025-12-02 10:15:18.033989415 +0000 UTC m=+810.405763569" Dec 02 10:15:18 crc kubenswrapper[4685]: I1202 10:15:18.082162 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zn4n\" (UniqueName: \"kubernetes.io/projected/16f31204-e29f-41f1-b560-814671c5967a-kube-api-access-2zn4n\") pod \"openstack-operator-index-8mbzs\" (UID: \"16f31204-e29f-41f1-b560-814671c5967a\") " pod="openstack-operators/openstack-operator-index-8mbzs" Dec 02 10:15:18 crc kubenswrapper[4685]: I1202 10:15:18.102503 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zn4n\" (UniqueName: \"kubernetes.io/projected/16f31204-e29f-41f1-b560-814671c5967a-kube-api-access-2zn4n\") pod \"openstack-operator-index-8mbzs\" (UID: \"16f31204-e29f-41f1-b560-814671c5967a\") " pod="openstack-operators/openstack-operator-index-8mbzs" Dec 02 10:15:18 crc kubenswrapper[4685]: I1202 10:15:18.171700 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-8mbzs" Dec 02 10:15:18 crc kubenswrapper[4685]: I1202 10:15:18.367303 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-8mbzs"] Dec 02 10:15:18 crc kubenswrapper[4685]: W1202 10:15:18.379123 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16f31204_e29f_41f1_b560_814671c5967a.slice/crio-61d578b82f87bc3ae3feac78dfbe57e35fb56eba4566ec443fb24db6076f7b0b WatchSource:0}: Error finding container 61d578b82f87bc3ae3feac78dfbe57e35fb56eba4566ec443fb24db6076f7b0b: Status 404 returned error can't find the container with id 61d578b82f87bc3ae3feac78dfbe57e35fb56eba4566ec443fb24db6076f7b0b Dec 02 10:15:19 crc kubenswrapper[4685]: I1202 10:15:19.019232 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8mbzs" event={"ID":"16f31204-e29f-41f1-b560-814671c5967a","Type":"ContainerStarted","Data":"61d578b82f87bc3ae3feac78dfbe57e35fb56eba4566ec443fb24db6076f7b0b"} Dec 02 10:15:19 crc kubenswrapper[4685]: I1202 10:15:19.757605 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:15:19 crc kubenswrapper[4685]: I1202 10:15:19.805933 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:15:19 crc kubenswrapper[4685]: I1202 10:15:19.878687 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-z8v2s" Dec 02 10:15:20 crc kubenswrapper[4685]: I1202 10:15:20.804667 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:20 crc kubenswrapper[4685]: I1202 10:15:20.804751 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:20 crc kubenswrapper[4685]: I1202 10:15:20.849401 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:23 crc kubenswrapper[4685]: I1202 10:15:23.395799 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:23 crc kubenswrapper[4685]: I1202 10:15:23.396177 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:23 crc kubenswrapper[4685]: I1202 10:15:23.467810 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.129521 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.254924 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cgv9j"] Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.256287 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.276469 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cgv9j"] Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.363437 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f138872d-e96a-4dcb-8400-c2d5982dd07c-catalog-content\") pod \"certified-operators-cgv9j\" (UID: \"f138872d-e96a-4dcb-8400-c2d5982dd07c\") " pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.363496 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p5rd\" (UniqueName: \"kubernetes.io/projected/f138872d-e96a-4dcb-8400-c2d5982dd07c-kube-api-access-7p5rd\") pod \"certified-operators-cgv9j\" (UID: \"f138872d-e96a-4dcb-8400-c2d5982dd07c\") " pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.363539 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f138872d-e96a-4dcb-8400-c2d5982dd07c-utilities\") pod \"certified-operators-cgv9j\" (UID: \"f138872d-e96a-4dcb-8400-c2d5982dd07c\") " pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.464796 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f138872d-e96a-4dcb-8400-c2d5982dd07c-catalog-content\") pod \"certified-operators-cgv9j\" (UID: \"f138872d-e96a-4dcb-8400-c2d5982dd07c\") " pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.464859 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p5rd\" (UniqueName: \"kubernetes.io/projected/f138872d-e96a-4dcb-8400-c2d5982dd07c-kube-api-access-7p5rd\") pod \"certified-operators-cgv9j\" (UID: \"f138872d-e96a-4dcb-8400-c2d5982dd07c\") " pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.464905 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f138872d-e96a-4dcb-8400-c2d5982dd07c-utilities\") pod \"certified-operators-cgv9j\" (UID: \"f138872d-e96a-4dcb-8400-c2d5982dd07c\") " pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.465824 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f138872d-e96a-4dcb-8400-c2d5982dd07c-catalog-content\") pod \"certified-operators-cgv9j\" (UID: \"f138872d-e96a-4dcb-8400-c2d5982dd07c\") " pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.465931 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f138872d-e96a-4dcb-8400-c2d5982dd07c-utilities\") pod \"certified-operators-cgv9j\" (UID: \"f138872d-e96a-4dcb-8400-c2d5982dd07c\") " pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.500227 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7p5rd\" (UniqueName: \"kubernetes.io/projected/f138872d-e96a-4dcb-8400-c2d5982dd07c-kube-api-access-7p5rd\") pod \"certified-operators-cgv9j\" (UID: \"f138872d-e96a-4dcb-8400-c2d5982dd07c\") " pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:24 crc kubenswrapper[4685]: I1202 10:15:24.576662 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:28 crc kubenswrapper[4685]: I1202 10:15:28.095390 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cgv9j"] Dec 02 10:15:28 crc kubenswrapper[4685]: W1202 10:15:28.797024 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf138872d_e96a_4dcb_8400_c2d5982dd07c.slice/crio-4a7da4827b4efe864615b45eb58a71ac45df6ad38dd18dc7875a924353119285 WatchSource:0}: Error finding container 4a7da4827b4efe864615b45eb58a71ac45df6ad38dd18dc7875a924353119285: Status 404 returned error can't find the container with id 4a7da4827b4efe864615b45eb58a71ac45df6ad38dd18dc7875a924353119285 Dec 02 10:15:29 crc kubenswrapper[4685]: I1202 10:15:29.097842 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgv9j" event={"ID":"f138872d-e96a-4dcb-8400-c2d5982dd07c","Type":"ContainerStarted","Data":"4a7da4827b4efe864615b45eb58a71ac45df6ad38dd18dc7875a924353119285"} Dec 02 10:15:29 crc kubenswrapper[4685]: I1202 10:15:29.246998 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xphfz"] Dec 02 10:15:29 crc kubenswrapper[4685]: I1202 10:15:29.247340 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xphfz" podUID="7cf41ade-7601-4b2a-8705-76d9f1e50212" containerName="registry-server" containerID="cri-o://06a7179d6cd5c6fa5eccd523be501a5c182f2c707f3314b77910bb472ab6f9fc" gracePeriod=2 Dec 02 10:15:29 crc kubenswrapper[4685]: I1202 10:15:29.749135 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gdnpk" Dec 02 10:15:29 crc kubenswrapper[4685]: I1202 10:15:29.760017 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-jbwrt" Dec 02 10:15:30 crc kubenswrapper[4685]: I1202 10:15:30.105632 4685 generic.go:334] "Generic (PLEG): container finished" podID="7cf41ade-7601-4b2a-8705-76d9f1e50212" containerID="06a7179d6cd5c6fa5eccd523be501a5c182f2c707f3314b77910bb472ab6f9fc" exitCode=0 Dec 02 10:15:30 crc kubenswrapper[4685]: I1202 10:15:30.105696 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xphfz" event={"ID":"7cf41ade-7601-4b2a-8705-76d9f1e50212","Type":"ContainerDied","Data":"06a7179d6cd5c6fa5eccd523be501a5c182f2c707f3314b77910bb472ab6f9fc"} Dec 02 10:15:30 crc kubenswrapper[4685]: I1202 10:15:30.851574 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.123083 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.125123 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xphfz" event={"ID":"7cf41ade-7601-4b2a-8705-76d9f1e50212","Type":"ContainerDied","Data":"10e14c762a196b10a7da203324b9c6b8f2f6d364df6f2349eae823f36d148cc4"} Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.125173 4685 scope.go:117] "RemoveContainer" containerID="06a7179d6cd5c6fa5eccd523be501a5c182f2c707f3314b77910bb472ab6f9fc" Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.155369 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-catalog-content\") pod \"7cf41ade-7601-4b2a-8705-76d9f1e50212\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.155421 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtcxg\" (UniqueName: \"kubernetes.io/projected/7cf41ade-7601-4b2a-8705-76d9f1e50212-kube-api-access-rtcxg\") pod \"7cf41ade-7601-4b2a-8705-76d9f1e50212\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.155473 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-utilities\") pod \"7cf41ade-7601-4b2a-8705-76d9f1e50212\" (UID: \"7cf41ade-7601-4b2a-8705-76d9f1e50212\") " Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.156770 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-utilities" (OuterVolumeSpecName: "utilities") pod "7cf41ade-7601-4b2a-8705-76d9f1e50212" (UID: "7cf41ade-7601-4b2a-8705-76d9f1e50212"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.169912 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cf41ade-7601-4b2a-8705-76d9f1e50212-kube-api-access-rtcxg" (OuterVolumeSpecName: "kube-api-access-rtcxg") pod "7cf41ade-7601-4b2a-8705-76d9f1e50212" (UID: "7cf41ade-7601-4b2a-8705-76d9f1e50212"). InnerVolumeSpecName "kube-api-access-rtcxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.214758 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7cf41ade-7601-4b2a-8705-76d9f1e50212" (UID: "7cf41ade-7601-4b2a-8705-76d9f1e50212"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.256985 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.257015 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtcxg\" (UniqueName: \"kubernetes.io/projected/7cf41ade-7601-4b2a-8705-76d9f1e50212-kube-api-access-rtcxg\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.257026 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7cf41ade-7601-4b2a-8705-76d9f1e50212-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.338353 4685 scope.go:117] "RemoveContainer" containerID="bf28e1fc2a727be16bf3200e5287f438690a237f9de9dfdf01c7a20f7d4dd6fa" Dec 02 10:15:31 crc kubenswrapper[4685]: I1202 10:15:31.359852 4685 scope.go:117] "RemoveContainer" containerID="0b273e58e81a7959aee7f9a88b706752b2d6283ba2a08368b9a25eb2bcafe6da" Dec 02 10:15:32 crc kubenswrapper[4685]: I1202 10:15:32.131384 4685 generic.go:334] "Generic (PLEG): container finished" podID="f138872d-e96a-4dcb-8400-c2d5982dd07c" containerID="5303d6a58f8f658f1b1ebdefd4b83eb1bccc879e7885e54dabdd13930073ad81" exitCode=0 Dec 02 10:15:32 crc kubenswrapper[4685]: I1202 10:15:32.131456 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgv9j" event={"ID":"f138872d-e96a-4dcb-8400-c2d5982dd07c","Type":"ContainerDied","Data":"5303d6a58f8f658f1b1ebdefd4b83eb1bccc879e7885e54dabdd13930073ad81"} Dec 02 10:15:32 crc kubenswrapper[4685]: I1202 10:15:32.133852 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8mbzs" event={"ID":"16f31204-e29f-41f1-b560-814671c5967a","Type":"ContainerStarted","Data":"14c665223b13a59096d5205cd07a87a2a7652ec3e1f548b79830be0a2261f346"} Dec 02 10:15:32 crc kubenswrapper[4685]: I1202 10:15:32.134893 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xphfz" Dec 02 10:15:32 crc kubenswrapper[4685]: I1202 10:15:32.171937 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xphfz"] Dec 02 10:15:32 crc kubenswrapper[4685]: I1202 10:15:32.181242 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xphfz"] Dec 02 10:15:32 crc kubenswrapper[4685]: I1202 10:15:32.182514 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-8mbzs" podStartSLOduration=1.9691913140000001 podStartE2EDuration="15.182490306s" podCreationTimestamp="2025-12-02 10:15:17 +0000 UTC" firstStartedPulling="2025-12-02 10:15:18.381552086 +0000 UTC m=+810.753326240" lastFinishedPulling="2025-12-02 10:15:31.594851078 +0000 UTC m=+823.966625232" observedRunningTime="2025-12-02 10:15:32.177093258 +0000 UTC m=+824.548867422" watchObservedRunningTime="2025-12-02 10:15:32.182490306 +0000 UTC m=+824.554264470" Dec 02 10:15:33 crc kubenswrapper[4685]: I1202 10:15:33.251901 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lxf9v"] Dec 02 10:15:33 crc kubenswrapper[4685]: I1202 10:15:33.252274 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lxf9v" podUID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerName="registry-server" containerID="cri-o://4f12fbc7596844b9a7c3f1d27e2c3551aca52a89891969adf960c001786b215f" gracePeriod=2 Dec 02 10:15:33 crc kubenswrapper[4685]: I1202 10:15:33.910987 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cf41ade-7601-4b2a-8705-76d9f1e50212" path="/var/lib/kubelet/pods/7cf41ade-7601-4b2a-8705-76d9f1e50212/volumes" Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.157533 4685 generic.go:334] "Generic (PLEG): container finished" podID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerID="4f12fbc7596844b9a7c3f1d27e2c3551aca52a89891969adf960c001786b215f" exitCode=0 Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.157716 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxf9v" event={"ID":"850a45a9-49fe-474d-bbea-045b9a08dee4","Type":"ContainerDied","Data":"4f12fbc7596844b9a7c3f1d27e2c3551aca52a89891969adf960c001786b215f"} Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.157764 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lxf9v" event={"ID":"850a45a9-49fe-474d-bbea-045b9a08dee4","Type":"ContainerDied","Data":"52428c112686283bdb1cbefd651658e87774ff29fdb3b061832debf2362bc333"} Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.157777 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52428c112686283bdb1cbefd651658e87774ff29fdb3b061832debf2362bc333" Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.192302 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.297211 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxdgn\" (UniqueName: \"kubernetes.io/projected/850a45a9-49fe-474d-bbea-045b9a08dee4-kube-api-access-jxdgn\") pod \"850a45a9-49fe-474d-bbea-045b9a08dee4\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.297266 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-catalog-content\") pod \"850a45a9-49fe-474d-bbea-045b9a08dee4\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.297373 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-utilities\") pod \"850a45a9-49fe-474d-bbea-045b9a08dee4\" (UID: \"850a45a9-49fe-474d-bbea-045b9a08dee4\") " Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.298451 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-utilities" (OuterVolumeSpecName: "utilities") pod "850a45a9-49fe-474d-bbea-045b9a08dee4" (UID: "850a45a9-49fe-474d-bbea-045b9a08dee4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.310809 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/850a45a9-49fe-474d-bbea-045b9a08dee4-kube-api-access-jxdgn" (OuterVolumeSpecName: "kube-api-access-jxdgn") pod "850a45a9-49fe-474d-bbea-045b9a08dee4" (UID: "850a45a9-49fe-474d-bbea-045b9a08dee4"). InnerVolumeSpecName "kube-api-access-jxdgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.320460 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "850a45a9-49fe-474d-bbea-045b9a08dee4" (UID: "850a45a9-49fe-474d-bbea-045b9a08dee4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.399399 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.399470 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/850a45a9-49fe-474d-bbea-045b9a08dee4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:34 crc kubenswrapper[4685]: I1202 10:15:34.399483 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxdgn\" (UniqueName: \"kubernetes.io/projected/850a45a9-49fe-474d-bbea-045b9a08dee4-kube-api-access-jxdgn\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:35 crc kubenswrapper[4685]: I1202 10:15:35.162420 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lxf9v" Dec 02 10:15:35 crc kubenswrapper[4685]: I1202 10:15:35.190810 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lxf9v"] Dec 02 10:15:35 crc kubenswrapper[4685]: I1202 10:15:35.195129 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lxf9v"] Dec 02 10:15:35 crc kubenswrapper[4685]: I1202 10:15:35.910354 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="850a45a9-49fe-474d-bbea-045b9a08dee4" path="/var/lib/kubelet/pods/850a45a9-49fe-474d-bbea-045b9a08dee4/volumes" Dec 02 10:15:38 crc kubenswrapper[4685]: I1202 10:15:38.172706 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-8mbzs" Dec 02 10:15:38 crc kubenswrapper[4685]: I1202 10:15:38.172965 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-8mbzs" Dec 02 10:15:38 crc kubenswrapper[4685]: I1202 10:15:38.200908 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-8mbzs" Dec 02 10:15:38 crc kubenswrapper[4685]: I1202 10:15:38.244481 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-8mbzs" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.198921 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgv9j" event={"ID":"f138872d-e96a-4dcb-8400-c2d5982dd07c","Type":"ContainerStarted","Data":"9caa15f4da0335dc9ac5fa7c0e664502013617f979680438f75d9c347c645f07"} Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.729804 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4"] Dec 02 10:15:39 crc kubenswrapper[4685]: E1202 10:15:39.730162 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerName="extract-content" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.730184 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerName="extract-content" Dec 02 10:15:39 crc kubenswrapper[4685]: E1202 10:15:39.730200 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerName="extract-utilities" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.730207 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerName="extract-utilities" Dec 02 10:15:39 crc kubenswrapper[4685]: E1202 10:15:39.730216 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cf41ade-7601-4b2a-8705-76d9f1e50212" containerName="registry-server" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.730226 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cf41ade-7601-4b2a-8705-76d9f1e50212" containerName="registry-server" Dec 02 10:15:39 crc kubenswrapper[4685]: E1202 10:15:39.730237 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cf41ade-7601-4b2a-8705-76d9f1e50212" containerName="extract-utilities" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.730245 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cf41ade-7601-4b2a-8705-76d9f1e50212" 
containerName="extract-utilities" Dec 02 10:15:39 crc kubenswrapper[4685]: E1202 10:15:39.730270 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerName="registry-server" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.730278 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerName="registry-server" Dec 02 10:15:39 crc kubenswrapper[4685]: E1202 10:15:39.730290 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cf41ade-7601-4b2a-8705-76d9f1e50212" containerName="extract-content" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.730300 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cf41ade-7601-4b2a-8705-76d9f1e50212" containerName="extract-content" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.730426 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cf41ade-7601-4b2a-8705-76d9f1e50212" containerName="registry-server" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.730446 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="850a45a9-49fe-474d-bbea-045b9a08dee4" containerName="registry-server" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.731517 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.733674 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-ppz4w" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.747692 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4"] Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.870478 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-util\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.870527 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-bundle\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.870614 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8xb8\" (UniqueName: \"kubernetes.io/projected/dc08c183-1584-4e4c-baf9-eff33de7b396-kube-api-access-v8xb8\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.971612 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8xb8\" (UniqueName: 
\"kubernetes.io/projected/dc08c183-1584-4e4c-baf9-eff33de7b396-kube-api-access-v8xb8\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.971696 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-util\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.971729 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-bundle\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.972721 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-util\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.972794 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-bundle\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:39 crc kubenswrapper[4685]: I1202 10:15:39.993887 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8xb8\" (UniqueName: \"kubernetes.io/projected/dc08c183-1584-4e4c-baf9-eff33de7b396-kube-api-access-v8xb8\") pod \"8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:40 crc kubenswrapper[4685]: I1202 10:15:40.045194 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:40 crc kubenswrapper[4685]: I1202 10:15:40.208656 4685 generic.go:334] "Generic (PLEG): container finished" podID="f138872d-e96a-4dcb-8400-c2d5982dd07c" containerID="9caa15f4da0335dc9ac5fa7c0e664502013617f979680438f75d9c347c645f07" exitCode=0 Dec 02 10:15:40 crc kubenswrapper[4685]: I1202 10:15:40.208697 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgv9j" event={"ID":"f138872d-e96a-4dcb-8400-c2d5982dd07c","Type":"ContainerDied","Data":"9caa15f4da0335dc9ac5fa7c0e664502013617f979680438f75d9c347c645f07"} Dec 02 10:15:40 crc kubenswrapper[4685]: I1202 10:15:40.254346 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4"] Dec 02 10:15:41 crc kubenswrapper[4685]: I1202 10:15:41.216364 4685 generic.go:334] "Generic (PLEG): container finished" podID="dc08c183-1584-4e4c-baf9-eff33de7b396" containerID="8121297f4c0a536e44d4e510f77b1e521272de26c0ee11912033d5c22b12d305" exitCode=0 Dec 02 10:15:41 crc kubenswrapper[4685]: I1202 10:15:41.216522 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" event={"ID":"dc08c183-1584-4e4c-baf9-eff33de7b396","Type":"ContainerDied","Data":"8121297f4c0a536e44d4e510f77b1e521272de26c0ee11912033d5c22b12d305"} Dec 02 10:15:41 crc kubenswrapper[4685]: I1202 10:15:41.216745 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" event={"ID":"dc08c183-1584-4e4c-baf9-eff33de7b396","Type":"ContainerStarted","Data":"aa09029dbc6a1696406f6c1315e86fb64d912300366868a098f6fb2b6524f3d0"} Dec 02 10:15:41 crc kubenswrapper[4685]: I1202 10:15:41.218782 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cgv9j" event={"ID":"f138872d-e96a-4dcb-8400-c2d5982dd07c","Type":"ContainerStarted","Data":"c2fda58b8c4b1add2ef5e443e39a28c4c192452c6d948816aefbff8ccfb2feda"} Dec 02 10:15:41 crc kubenswrapper[4685]: I1202 10:15:41.258919 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cgv9j" podStartSLOduration=8.46026009 podStartE2EDuration="17.258898971s" podCreationTimestamp="2025-12-02 10:15:24 +0000 UTC" firstStartedPulling="2025-12-02 10:15:32.133048228 +0000 UTC m=+824.504822382" lastFinishedPulling="2025-12-02 10:15:40.931687099 +0000 UTC m=+833.303461263" observedRunningTime="2025-12-02 10:15:41.252218118 +0000 UTC m=+833.623992292" watchObservedRunningTime="2025-12-02 10:15:41.258898971 +0000 UTC m=+833.630673135" Dec 02 10:15:42 crc kubenswrapper[4685]: I1202 10:15:42.226639 4685 generic.go:334] "Generic (PLEG): container finished" podID="dc08c183-1584-4e4c-baf9-eff33de7b396" containerID="afa52bc708892f9ddde0dd66b6c3a8bfb3da278287d36e5fe6b8198fa0f97330" exitCode=0 Dec 02 10:15:42 crc kubenswrapper[4685]: I1202 10:15:42.227723 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" event={"ID":"dc08c183-1584-4e4c-baf9-eff33de7b396","Type":"ContainerDied","Data":"afa52bc708892f9ddde0dd66b6c3a8bfb3da278287d36e5fe6b8198fa0f97330"} Dec 02 10:15:43 crc kubenswrapper[4685]: I1202 10:15:43.236155 4685 generic.go:334] "Generic (PLEG): container 
finished" podID="dc08c183-1584-4e4c-baf9-eff33de7b396" containerID="c3fee5fafd4d178fc22a6f3a94ee25ba9818047494cbd2f85ae0ed32686826b2" exitCode=0 Dec 02 10:15:43 crc kubenswrapper[4685]: I1202 10:15:43.236480 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" event={"ID":"dc08c183-1584-4e4c-baf9-eff33de7b396","Type":"ContainerDied","Data":"c3fee5fafd4d178fc22a6f3a94ee25ba9818047494cbd2f85ae0ed32686826b2"} Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.476173 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.577799 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.578220 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.631022 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.633423 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-bundle\") pod \"dc08c183-1584-4e4c-baf9-eff33de7b396\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.633475 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-util\") pod \"dc08c183-1584-4e4c-baf9-eff33de7b396\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.633635 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8xb8\" (UniqueName: \"kubernetes.io/projected/dc08c183-1584-4e4c-baf9-eff33de7b396-kube-api-access-v8xb8\") pod \"dc08c183-1584-4e4c-baf9-eff33de7b396\" (UID: \"dc08c183-1584-4e4c-baf9-eff33de7b396\") " Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.637602 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-bundle" (OuterVolumeSpecName: "bundle") pod "dc08c183-1584-4e4c-baf9-eff33de7b396" (UID: "dc08c183-1584-4e4c-baf9-eff33de7b396"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.641323 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc08c183-1584-4e4c-baf9-eff33de7b396-kube-api-access-v8xb8" (OuterVolumeSpecName: "kube-api-access-v8xb8") pod "dc08c183-1584-4e4c-baf9-eff33de7b396" (UID: "dc08c183-1584-4e4c-baf9-eff33de7b396"). InnerVolumeSpecName "kube-api-access-v8xb8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.656664 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-util" (OuterVolumeSpecName: "util") pod "dc08c183-1584-4e4c-baf9-eff33de7b396" (UID: "dc08c183-1584-4e4c-baf9-eff33de7b396"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.735861 4685 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-util\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.735896 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8xb8\" (UniqueName: \"kubernetes.io/projected/dc08c183-1584-4e4c-baf9-eff33de7b396-kube-api-access-v8xb8\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:44 crc kubenswrapper[4685]: I1202 10:15:44.735907 4685 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dc08c183-1584-4e4c-baf9-eff33de7b396-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:45 crc kubenswrapper[4685]: I1202 10:15:45.251155 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" Dec 02 10:15:45 crc kubenswrapper[4685]: I1202 10:15:45.251167 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4" event={"ID":"dc08c183-1584-4e4c-baf9-eff33de7b396","Type":"ContainerDied","Data":"aa09029dbc6a1696406f6c1315e86fb64d912300366868a098f6fb2b6524f3d0"} Dec 02 10:15:45 crc kubenswrapper[4685]: I1202 10:15:45.251225 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa09029dbc6a1696406f6c1315e86fb64d912300366868a098f6fb2b6524f3d0" Dec 02 10:15:46 crc kubenswrapper[4685]: I1202 10:15:46.291028 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cgv9j" Dec 02 10:15:47 crc kubenswrapper[4685]: I1202 10:15:47.680116 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cgv9j"] Dec 02 10:15:48 crc kubenswrapper[4685]: I1202 10:15:48.050567 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g2zlc"] Dec 02 10:15:48 crc kubenswrapper[4685]: I1202 10:15:48.051097 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g2zlc" podUID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerName="registry-server" containerID="cri-o://c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12" gracePeriod=2 Dec 02 10:15:48 crc kubenswrapper[4685]: E1202 10:15:48.411016 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12 is running failed: container process not found" containerID="c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 10:15:48 crc kubenswrapper[4685]: E1202 10:15:48.411804 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc 
error: code = NotFound desc = container is not created or running: checking if PID of c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12 is running failed: container process not found" containerID="c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 10:15:48 crc kubenswrapper[4685]: E1202 10:15:48.412126 4685 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12 is running failed: container process not found" containerID="c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12" cmd=["grpc_health_probe","-addr=:50051"] Dec 02 10:15:48 crc kubenswrapper[4685]: E1202 10:15:48.412193 4685 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-g2zlc" podUID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerName="registry-server" Dec 02 10:15:48 crc kubenswrapper[4685]: I1202 10:15:48.984609 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.093624 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-catalog-content\") pod \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.093698 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-utilities\") pod \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.093750 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ldc9\" (UniqueName: \"kubernetes.io/projected/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-kube-api-access-2ldc9\") pod \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\" (UID: \"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4\") " Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.094526 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-utilities" (OuterVolumeSpecName: "utilities") pod "3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" (UID: "3d0f6867-a4f6-4e00-bce4-cea8f7d301a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.100901 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-kube-api-access-2ldc9" (OuterVolumeSpecName: "kube-api-access-2ldc9") pod "3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" (UID: "3d0f6867-a4f6-4e00-bce4-cea8f7d301a4"). InnerVolumeSpecName "kube-api-access-2ldc9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.138074 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" (UID: "3d0f6867-a4f6-4e00-bce4-cea8f7d301a4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.195461 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.195495 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.195506 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ldc9\" (UniqueName: \"kubernetes.io/projected/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4-kube-api-access-2ldc9\") on node \"crc\" DevicePath \"\"" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.281693 4685 generic.go:334] "Generic (PLEG): container finished" podID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerID="c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12" exitCode=0 Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.281745 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g2zlc" event={"ID":"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4","Type":"ContainerDied","Data":"c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12"} Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.281775 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g2zlc" event={"ID":"3d0f6867-a4f6-4e00-bce4-cea8f7d301a4","Type":"ContainerDied","Data":"433ed3a5fef13922376bdb192816a7249f2b15a6660ebe1512fb0abab2c9a018"} Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.281775 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g2zlc" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.281795 4685 scope.go:117] "RemoveContainer" containerID="c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.308781 4685 scope.go:117] "RemoveContainer" containerID="6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.339224 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g2zlc"] Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.346618 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g2zlc"] Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.348154 4685 scope.go:117] "RemoveContainer" containerID="a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.381214 4685 scope.go:117] "RemoveContainer" containerID="c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12" Dec 02 10:15:49 crc kubenswrapper[4685]: E1202 10:15:49.381838 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12\": container with ID starting with c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12 not found: ID does not exist" containerID="c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.382000 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12"} err="failed to get container status \"c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12\": rpc error: code = NotFound desc = could not find container \"c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12\": container with ID starting with c22be2e150b949269846b18193af63b753d59d8aa06d77c137a20bbbdd3ade12 not found: ID does not exist" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.382109 4685 scope.go:117] "RemoveContainer" containerID="6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741" Dec 02 10:15:49 crc kubenswrapper[4685]: E1202 10:15:49.382743 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741\": container with ID starting with 6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741 not found: ID does not exist" containerID="6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.382817 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741"} err="failed to get container status \"6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741\": rpc error: code = NotFound desc = could not find container \"6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741\": container with ID starting with 6c0cb17085cf69c0b1d34017d5636db61453cc626f493622039d7a4a89945741 not found: ID does not exist" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.382853 4685 scope.go:117] "RemoveContainer" 
containerID="a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491" Dec 02 10:15:49 crc kubenswrapper[4685]: E1202 10:15:49.383224 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491\": container with ID starting with a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491 not found: ID does not exist" containerID="a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.383396 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491"} err="failed to get container status \"a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491\": rpc error: code = NotFound desc = could not find container \"a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491\": container with ID starting with a6235a0a7b70d6189fc8f26576a59fe6ffcb6d8c49e597f87e01a9d7ce8fe491 not found: ID does not exist" Dec 02 10:15:49 crc kubenswrapper[4685]: I1202 10:15:49.908872 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" path="/var/lib/kubelet/pods/3d0f6867-a4f6-4e00-bce4-cea8f7d301a4/volumes" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.875773 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb"] Dec 02 10:15:50 crc kubenswrapper[4685]: E1202 10:15:50.876345 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc08c183-1584-4e4c-baf9-eff33de7b396" containerName="util" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.876357 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc08c183-1584-4e4c-baf9-eff33de7b396" containerName="util" Dec 02 10:15:50 crc kubenswrapper[4685]: E1202 10:15:50.876368 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc08c183-1584-4e4c-baf9-eff33de7b396" containerName="pull" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.876373 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc08c183-1584-4e4c-baf9-eff33de7b396" containerName="pull" Dec 02 10:15:50 crc kubenswrapper[4685]: E1202 10:15:50.876380 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerName="extract-utilities" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.876386 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerName="extract-utilities" Dec 02 10:15:50 crc kubenswrapper[4685]: E1202 10:15:50.876396 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc08c183-1584-4e4c-baf9-eff33de7b396" containerName="extract" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.876402 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc08c183-1584-4e4c-baf9-eff33de7b396" containerName="extract" Dec 02 10:15:50 crc kubenswrapper[4685]: E1202 10:15:50.876421 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerName="extract-content" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.876427 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerName="extract-content" Dec 02 10:15:50 crc kubenswrapper[4685]: 
E1202 10:15:50.876437 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerName="registry-server" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.876443 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerName="registry-server" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.876544 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc08c183-1584-4e4c-baf9-eff33de7b396" containerName="extract" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.876552 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d0f6867-a4f6-4e00-bce4-cea8f7d301a4" containerName="registry-server" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.876982 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb" Dec 02 10:15:50 crc kubenswrapper[4685]: I1202 10:15:50.878860 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-rqr2c" Dec 02 10:15:51 crc kubenswrapper[4685]: I1202 10:15:51.014055 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb"] Dec 02 10:15:51 crc kubenswrapper[4685]: I1202 10:15:51.019750 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ljpf\" (UniqueName: \"kubernetes.io/projected/6e78ca06-e6a2-4973-a8df-593409527f5e-kube-api-access-6ljpf\") pod \"openstack-operator-controller-operator-b866867cf-zf2zb\" (UID: \"6e78ca06-e6a2-4973-a8df-593409527f5e\") " pod="openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb" Dec 02 10:15:51 crc kubenswrapper[4685]: I1202 10:15:51.121278 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ljpf\" (UniqueName: \"kubernetes.io/projected/6e78ca06-e6a2-4973-a8df-593409527f5e-kube-api-access-6ljpf\") pod \"openstack-operator-controller-operator-b866867cf-zf2zb\" (UID: \"6e78ca06-e6a2-4973-a8df-593409527f5e\") " pod="openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb" Dec 02 10:15:51 crc kubenswrapper[4685]: I1202 10:15:51.150757 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ljpf\" (UniqueName: \"kubernetes.io/projected/6e78ca06-e6a2-4973-a8df-593409527f5e-kube-api-access-6ljpf\") pod \"openstack-operator-controller-operator-b866867cf-zf2zb\" (UID: \"6e78ca06-e6a2-4973-a8df-593409527f5e\") " pod="openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb" Dec 02 10:15:51 crc kubenswrapper[4685]: I1202 10:15:51.196273 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb" Dec 02 10:15:51 crc kubenswrapper[4685]: I1202 10:15:51.524578 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb"] Dec 02 10:15:52 crc kubenswrapper[4685]: I1202 10:15:52.310985 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb" event={"ID":"6e78ca06-e6a2-4973-a8df-593409527f5e","Type":"ContainerStarted","Data":"1d3e1d3eaa201a5dcd5209714f760d0a16d85c5e84100054ef864db19bac3498"} Dec 02 10:16:00 crc kubenswrapper[4685]: I1202 10:16:00.355472 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb" event={"ID":"6e78ca06-e6a2-4973-a8df-593409527f5e","Type":"ContainerStarted","Data":"d1649b5bb81eaefca169c659b9df8d3324604582c7558ef15c26bd517cec662c"} Dec 02 10:16:00 crc kubenswrapper[4685]: I1202 10:16:00.356741 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb" Dec 02 10:16:00 crc kubenswrapper[4685]: I1202 10:16:00.384516 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb" podStartSLOduration=2.7650977020000003 podStartE2EDuration="10.384498124s" podCreationTimestamp="2025-12-02 10:15:50 +0000 UTC" firstStartedPulling="2025-12-02 10:15:51.53483209 +0000 UTC m=+843.906606244" lastFinishedPulling="2025-12-02 10:15:59.154232502 +0000 UTC m=+851.526006666" observedRunningTime="2025-12-02 10:16:00.381730877 +0000 UTC m=+852.753505041" watchObservedRunningTime="2025-12-02 10:16:00.384498124 +0000 UTC m=+852.756272278" Dec 02 10:16:11 crc kubenswrapper[4685]: I1202 10:16:11.198926 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-b866867cf-zf2zb" Dec 02 10:16:12 crc kubenswrapper[4685]: I1202 10:16:12.147484 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:16:12 crc kubenswrapper[4685]: I1202 10:16:12.147807 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.156546 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.158520 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.161662 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-nncg6" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.188021 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.189405 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.193155 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.202803 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.205628 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-f9f2h" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.209025 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.210129 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.211824 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-x62bl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.223284 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.224321 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.227059 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.229938 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-jrqdl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.274822 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.303420 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.304771 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.318789 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-4jrnr" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.320601 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7hxl\" (UniqueName: \"kubernetes.io/projected/544b6fe9-890e-4b17-8f8e-55f53d64fcf7-kube-api-access-q7hxl\") pod \"barbican-operator-controller-manager-7d9dfd778-7qgfl\" (UID: \"544b6fe9-890e-4b17-8f8e-55f53d64fcf7\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.320670 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8s2vp\" (UniqueName: \"kubernetes.io/projected/901441b3-91d9-4edf-8955-cd5514589dec-kube-api-access-8s2vp\") pod \"glance-operator-controller-manager-77987cd8cd-vn545\" (UID: \"901441b3-91d9-4edf-8955-cd5514589dec\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.320749 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjqgh\" (UniqueName: \"kubernetes.io/projected/dd4f4fd1-38b6-4732-bab1-96a522d34e53-kube-api-access-wjqgh\") pod \"designate-operator-controller-manager-78b4bc895b-9s2nf\" (UID: \"dd4f4fd1-38b6-4732-bab1-96a522d34e53\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.320807 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vt9b\" (UniqueName: \"kubernetes.io/projected/cccf3baf-f063-4962-8856-c80c78439b82-kube-api-access-7vt9b\") pod \"cinder-operator-controller-manager-859b6ccc6-855qm\" (UID: \"cccf3baf-f063-4962-8856-c80c78439b82\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.332126 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.333447 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.337740 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-5kt8g" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.349872 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.370305 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.387188 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-p62sl"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.388179 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.391125 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.391441 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-gxv9j" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.416637 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-p62sl"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.423383 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjqgh\" (UniqueName: \"kubernetes.io/projected/dd4f4fd1-38b6-4732-bab1-96a522d34e53-kube-api-access-wjqgh\") pod \"designate-operator-controller-manager-78b4bc895b-9s2nf\" (UID: \"dd4f4fd1-38b6-4732-bab1-96a522d34e53\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.423439 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vt9b\" (UniqueName: \"kubernetes.io/projected/cccf3baf-f063-4962-8856-c80c78439b82-kube-api-access-7vt9b\") pod \"cinder-operator-controller-manager-859b6ccc6-855qm\" (UID: \"cccf3baf-f063-4962-8856-c80c78439b82\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.423481 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nvdm\" (UniqueName: \"kubernetes.io/projected/739ae795-6209-4881-8bf6-be391a810a86-kube-api-access-4nvdm\") pod \"horizon-operator-controller-manager-68c6d99b8f-xnk7k\" (UID: \"739ae795-6209-4881-8bf6-be391a810a86\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.423504 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7hxl\" (UniqueName: \"kubernetes.io/projected/544b6fe9-890e-4b17-8f8e-55f53d64fcf7-kube-api-access-q7hxl\") pod \"barbican-operator-controller-manager-7d9dfd778-7qgfl\" (UID: \"544b6fe9-890e-4b17-8f8e-55f53d64fcf7\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.423528 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtc5l\" (UniqueName: \"kubernetes.io/projected/b16756f6-29e0-4e33-8e00-f7b0e193b958-kube-api-access-dtc5l\") pod \"heat-operator-controller-manager-5f64f6f8bb-xqspp\" (UID: \"b16756f6-29e0-4e33-8e00-f7b0e193b958\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.423572 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8s2vp\" (UniqueName: \"kubernetes.io/projected/901441b3-91d9-4edf-8955-cd5514589dec-kube-api-access-8s2vp\") pod \"glance-operator-controller-manager-77987cd8cd-vn545\" (UID: \"901441b3-91d9-4edf-8955-cd5514589dec\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.434711 4685 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.435747 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.439488 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-v6tpr" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.467475 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjqgh\" (UniqueName: \"kubernetes.io/projected/dd4f4fd1-38b6-4732-bab1-96a522d34e53-kube-api-access-wjqgh\") pod \"designate-operator-controller-manager-78b4bc895b-9s2nf\" (UID: \"dd4f4fd1-38b6-4732-bab1-96a522d34e53\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.469594 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.471241 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.490342 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-vttfm" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.494209 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8s2vp\" (UniqueName: \"kubernetes.io/projected/901441b3-91d9-4edf-8955-cd5514589dec-kube-api-access-8s2vp\") pod \"glance-operator-controller-manager-77987cd8cd-vn545\" (UID: \"901441b3-91d9-4edf-8955-cd5514589dec\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.504155 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.516290 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7hxl\" (UniqueName: \"kubernetes.io/projected/544b6fe9-890e-4b17-8f8e-55f53d64fcf7-kube-api-access-q7hxl\") pod \"barbican-operator-controller-manager-7d9dfd778-7qgfl\" (UID: \"544b6fe9-890e-4b17-8f8e-55f53d64fcf7\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.520388 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vt9b\" (UniqueName: \"kubernetes.io/projected/cccf3baf-f063-4962-8856-c80c78439b82-kube-api-access-7vt9b\") pod \"cinder-operator-controller-manager-859b6ccc6-855qm\" (UID: \"cccf3baf-f063-4962-8856-c80c78439b82\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.525842 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.526311 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nvdm\" (UniqueName: \"kubernetes.io/projected/739ae795-6209-4881-8bf6-be391a810a86-kube-api-access-4nvdm\") pod \"horizon-operator-controller-manager-68c6d99b8f-xnk7k\" (UID: \"739ae795-6209-4881-8bf6-be391a810a86\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.526385 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtc5l\" (UniqueName: \"kubernetes.io/projected/b16756f6-29e0-4e33-8e00-f7b0e193b958-kube-api-access-dtc5l\") pod \"heat-operator-controller-manager-5f64f6f8bb-xqspp\" (UID: \"b16756f6-29e0-4e33-8e00-f7b0e193b958\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.526452 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cptkd\" (UniqueName: \"kubernetes.io/projected/36189be8-91c8-4b60-90d6-050a07ae86d3-kube-api-access-cptkd\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.526501 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.548960 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.550132 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.559368 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-wtdjd" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.559751 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.560295 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.594648 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nvdm\" (UniqueName: \"kubernetes.io/projected/739ae795-6209-4881-8bf6-be391a810a86-kube-api-access-4nvdm\") pod \"horizon-operator-controller-manager-68c6d99b8f-xnk7k\" (UID: \"739ae795-6209-4881-8bf6-be391a810a86\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.597024 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.598291 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.621035 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-kwbvz" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.621926 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtc5l\" (UniqueName: \"kubernetes.io/projected/b16756f6-29e0-4e33-8e00-f7b0e193b958-kube-api-access-dtc5l\") pod \"heat-operator-controller-manager-5f64f6f8bb-xqspp\" (UID: \"b16756f6-29e0-4e33-8e00-f7b0e193b958\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.626884 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.627108 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cptkd\" (UniqueName: \"kubernetes.io/projected/36189be8-91c8-4b60-90d6-050a07ae86d3-kube-api-access-cptkd\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.627149 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6cnk\" (UniqueName: \"kubernetes.io/projected/bb0c5cd2-9459-4e31-8613-f758d330dce2-kube-api-access-d6cnk\") pod \"ironic-operator-controller-manager-6c548fd776-vz7bq\" (UID: \"bb0c5cd2-9459-4e31-8613-f758d330dce2\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.627176 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.627197 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzm87\" (UniqueName: \"kubernetes.io/projected/6513f83d-2079-477d-8976-68cb969806fe-kube-api-access-vzm87\") pod \"keystone-operator-controller-manager-7765d96ddf-5sn8t\" (UID: 
\"6513f83d-2079-477d-8976-68cb969806fe\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" Dec 02 10:16:35 crc kubenswrapper[4685]: E1202 10:16:35.627395 4685 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:35 crc kubenswrapper[4685]: E1202 10:16:35.627446 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert podName:36189be8-91c8-4b60-90d6-050a07ae86d3 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:36.127428789 +0000 UTC m=+888.499202943 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert") pod "infra-operator-controller-manager-57548d458d-p62sl" (UID: "36189be8-91c8-4b60-90d6-050a07ae86d3") : secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.655912 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.701036 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.748120 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.767992 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cptkd\" (UniqueName: \"kubernetes.io/projected/36189be8-91c8-4b60-90d6-050a07ae86d3-kube-api-access-cptkd\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.770000 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzm87\" (UniqueName: \"kubernetes.io/projected/6513f83d-2079-477d-8976-68cb969806fe-kube-api-access-vzm87\") pod \"keystone-operator-controller-manager-7765d96ddf-5sn8t\" (UID: \"6513f83d-2079-477d-8976-68cb969806fe\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.771254 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6khv\" (UniqueName: \"kubernetes.io/projected/bc4b6647-31ea-45d0-ac59-b8b1cef80aeb-kube-api-access-c6khv\") pod \"mariadb-operator-controller-manager-56bbcc9d85-9l2jq\" (UID: \"bc4b6647-31ea-45d0-ac59-b8b1cef80aeb\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.778104 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.823834 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.823924 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2t66\" (UniqueName: \"kubernetes.io/projected/59c1b39b-2153-4c88-9229-0e951b086fdd-kube-api-access-m2t66\") pod \"manila-operator-controller-manager-7c79b5df47-rngtk\" (UID: \"59c1b39b-2153-4c88-9229-0e951b086fdd\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.824042 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6cnk\" (UniqueName: \"kubernetes.io/projected/bb0c5cd2-9459-4e31-8613-f758d330dce2-kube-api-access-d6cnk\") pod \"ironic-operator-controller-manager-6c548fd776-vz7bq\" (UID: \"bb0c5cd2-9459-4e31-8613-f758d330dce2\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.826444 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-99hsn" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.827425 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.883646 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.889220 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6cnk\" (UniqueName: \"kubernetes.io/projected/bb0c5cd2-9459-4e31-8613-f758d330dce2-kube-api-access-d6cnk\") pod \"ironic-operator-controller-manager-6c548fd776-vz7bq\" (UID: \"bb0c5cd2-9459-4e31-8613-f758d330dce2\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.900361 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl"] Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.916184 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.928769 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzm87\" (UniqueName: \"kubernetes.io/projected/6513f83d-2079-477d-8976-68cb969806fe-kube-api-access-vzm87\") pod \"keystone-operator-controller-manager-7765d96ddf-5sn8t\" (UID: \"6513f83d-2079-477d-8976-68cb969806fe\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.929534 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-njrs6" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.940252 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfsln\" (UniqueName: \"kubernetes.io/projected/0a4bb15b-4ada-4698-9747-dfa600f319d3-kube-api-access-gfsln\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-7tmr8\" (UID: \"0a4bb15b-4ada-4698-9747-dfa600f319d3\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.940331 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6khv\" (UniqueName: \"kubernetes.io/projected/bc4b6647-31ea-45d0-ac59-b8b1cef80aeb-kube-api-access-c6khv\") pod \"mariadb-operator-controller-manager-56bbcc9d85-9l2jq\" (UID: \"bc4b6647-31ea-45d0-ac59-b8b1cef80aeb\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.940437 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2t66\" (UniqueName: \"kubernetes.io/projected/59c1b39b-2153-4c88-9229-0e951b086fdd-kube-api-access-m2t66\") pod \"manila-operator-controller-manager-7c79b5df47-rngtk\" (UID: \"59c1b39b-2153-4c88-9229-0e951b086fdd\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" Dec 02 10:16:35 crc kubenswrapper[4685]: I1202 10:16:35.961861 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.005263 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.023267 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2t66\" (UniqueName: \"kubernetes.io/projected/59c1b39b-2153-4c88-9229-0e951b086fdd-kube-api-access-m2t66\") pod \"manila-operator-controller-manager-7c79b5df47-rngtk\" (UID: \"59c1b39b-2153-4c88-9229-0e951b086fdd\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.024267 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6khv\" (UniqueName: \"kubernetes.io/projected/bc4b6647-31ea-45d0-ac59-b8b1cef80aeb-kube-api-access-c6khv\") pod \"mariadb-operator-controller-manager-56bbcc9d85-9l2jq\" (UID: \"bc4b6647-31ea-45d0-ac59-b8b1cef80aeb\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.045348 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n8nk\" (UniqueName: \"kubernetes.io/projected/e88f923e-f7cc-4292-a49c-483be1e7848e-kube-api-access-7n8nk\") pod \"nova-operator-controller-manager-697bc559fc-bqctl\" (UID: \"e88f923e-f7cc-4292-a49c-483be1e7848e\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.045577 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfsln\" (UniqueName: \"kubernetes.io/projected/0a4bb15b-4ada-4698-9747-dfa600f319d3-kube-api-access-gfsln\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-7tmr8\" (UID: \"0a4bb15b-4ada-4698-9747-dfa600f319d3\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.050538 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.070472 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.091258 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfsln\" (UniqueName: \"kubernetes.io/projected/0a4bb15b-4ada-4698-9747-dfa600f319d3-kube-api-access-gfsln\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-7tmr8\" (UID: \"0a4bb15b-4ada-4698-9747-dfa600f319d3\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.102166 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.103311 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.108029 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-6zkrw" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.113884 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.126462 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.146384 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.146498 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n8nk\" (UniqueName: \"kubernetes.io/projected/e88f923e-f7cc-4292-a49c-483be1e7848e-kube-api-access-7n8nk\") pod \"nova-operator-controller-manager-697bc559fc-bqctl\" (UID: \"e88f923e-f7cc-4292-a49c-483be1e7848e\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" Dec 02 10:16:36 crc kubenswrapper[4685]: E1202 10:16:36.146676 4685 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:36 crc kubenswrapper[4685]: E1202 10:16:36.146771 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert podName:36189be8-91c8-4b60-90d6-050a07ae86d3 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:37.146746406 +0000 UTC m=+889.518520610 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert") pod "infra-operator-controller-manager-57548d458d-p62sl" (UID: "36189be8-91c8-4b60-90d6-050a07ae86d3") : secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.166745 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.170289 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.187378 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-zc6q7" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.187988 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.199752 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.204142 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.204146 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n8nk\" (UniqueName: \"kubernetes.io/projected/e88f923e-f7cc-4292-a49c-483be1e7848e-kube-api-access-7n8nk\") pod \"nova-operator-controller-manager-697bc559fc-bqctl\" (UID: \"e88f923e-f7cc-4292-a49c-483be1e7848e\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.211973 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.221977 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.227579 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-27vgv" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.245228 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.248537 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqrgp\" (UniqueName: \"kubernetes.io/projected/20b551a4-85bc-4ecb-b502-08f844a6b911-kube-api-access-tqrgp\") pod \"octavia-operator-controller-manager-998648c74-vx7jq\" (UID: \"20b551a4-85bc-4ecb-b502-08f844a6b911\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.248594 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glmk5\" (UniqueName: \"kubernetes.io/projected/30be1eaf-4d63-4fb6-9372-0857432b6b73-kube-api-access-glmk5\") pod \"ovn-operator-controller-manager-b6456fdb6-njgr6\" (UID: \"30be1eaf-4d63-4fb6-9372-0857432b6b73\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.248652 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c24vv\" (UniqueName: \"kubernetes.io/projected/50904294-59b3-4a71-84f9-8f171cad02e2-kube-api-access-c24vv\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: 
\"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.248707 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.253518 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.254947 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.264780 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-vw8j6" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.273209 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.273767 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.283650 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.298758 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.311903 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.319910 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.321387 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.323852 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-hh5kz" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.340159 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.341352 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.351844 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glmk5\" (UniqueName: \"kubernetes.io/projected/30be1eaf-4d63-4fb6-9372-0857432b6b73-kube-api-access-glmk5\") pod \"ovn-operator-controller-manager-b6456fdb6-njgr6\" (UID: \"30be1eaf-4d63-4fb6-9372-0857432b6b73\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.351948 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c24vv\" (UniqueName: \"kubernetes.io/projected/50904294-59b3-4a71-84f9-8f171cad02e2-kube-api-access-c24vv\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.351981 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tx5x\" (UniqueName: \"kubernetes.io/projected/e55320ae-8458-4802-aa07-e406f1b58fce-kube-api-access-5tx5x\") pod \"swift-operator-controller-manager-695b4bc5dc-jxvqs\" (UID: \"e55320ae-8458-4802-aa07-e406f1b58fce\") " pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.352021 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgkcq\" (UniqueName: \"kubernetes.io/projected/1177803f-ea41-40ed-8b1d-58c6761363f0-kube-api-access-qgkcq\") pod \"placement-operator-controller-manager-78f8948974-j5hhn\" (UID: \"1177803f-ea41-40ed-8b1d-58c6761363f0\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.352081 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.352128 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqrgp\" (UniqueName: \"kubernetes.io/projected/20b551a4-85bc-4ecb-b502-08f844a6b911-kube-api-access-tqrgp\") pod \"octavia-operator-controller-manager-998648c74-vx7jq\" (UID: \"20b551a4-85bc-4ecb-b502-08f844a6b911\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" Dec 02 10:16:36 crc kubenswrapper[4685]: E1202 10:16:36.353415 4685 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.353439 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-lzsbr" Dec 02 10:16:36 crc kubenswrapper[4685]: E1202 10:16:36.353472 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert 
podName:50904294-59b3-4a71-84f9-8f171cad02e2 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:36.853453231 +0000 UTC m=+889.225227385 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" (UID: "50904294-59b3-4a71-84f9-8f171cad02e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.353594 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.353709 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-rpccf" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.367320 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.376917 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.398389 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c24vv\" (UniqueName: \"kubernetes.io/projected/50904294-59b3-4a71-84f9-8f171cad02e2-kube-api-access-c24vv\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.404851 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.404959 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.407372 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glmk5\" (UniqueName: \"kubernetes.io/projected/30be1eaf-4d63-4fb6-9372-0857432b6b73-kube-api-access-glmk5\") pod \"ovn-operator-controller-manager-b6456fdb6-njgr6\" (UID: \"30be1eaf-4d63-4fb6-9372-0857432b6b73\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.415244 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqrgp\" (UniqueName: \"kubernetes.io/projected/20b551a4-85bc-4ecb-b502-08f844a6b911-kube-api-access-tqrgp\") pod \"octavia-operator-controller-manager-998648c74-vx7jq\" (UID: \"20b551a4-85bc-4ecb-b502-08f844a6b911\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.415319 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.440775 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.441410 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.442517 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.467830 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.471004 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-d97tj" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.471473 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-nnmst" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.473364 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.478091 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdl7v\" (UniqueName: \"kubernetes.io/projected/01015eb1-ac2f-4bc1-81d3-145ce402db5e-kube-api-access-bdl7v\") pod \"watcher-operator-controller-manager-769dc69bc-2dpvv\" (UID: \"01015eb1-ac2f-4bc1-81d3-145ce402db5e\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.478419 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tx5x\" (UniqueName: \"kubernetes.io/projected/e55320ae-8458-4802-aa07-e406f1b58fce-kube-api-access-5tx5x\") pod \"swift-operator-controller-manager-695b4bc5dc-jxvqs\" (UID: \"e55320ae-8458-4802-aa07-e406f1b58fce\") " pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.478505 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgkcq\" (UniqueName: \"kubernetes.io/projected/1177803f-ea41-40ed-8b1d-58c6761363f0-kube-api-access-qgkcq\") pod \"placement-operator-controller-manager-78f8948974-j5hhn\" (UID: \"1177803f-ea41-40ed-8b1d-58c6761363f0\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.478723 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkh6v\" (UniqueName: \"kubernetes.io/projected/8c138433-1126-4ffa-a017-19740e566084-kube-api-access-mkh6v\") pod \"test-operator-controller-manager-5854674fcc-zsxv4\" (UID: \"8c138433-1126-4ffa-a017-19740e566084\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.478768 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx7vc\" (UniqueName: \"kubernetes.io/projected/cf795d28-4ac2-44de-9ca7-10ef8788eb80-kube-api-access-gx7vc\") pod \"telemetry-operator-controller-manager-76cc84c6bb-llc6z\" (UID: 
\"cf795d28-4ac2-44de-9ca7-10ef8788eb80\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.550815 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.586074 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkh6v\" (UniqueName: \"kubernetes.io/projected/8c138433-1126-4ffa-a017-19740e566084-kube-api-access-mkh6v\") pod \"test-operator-controller-manager-5854674fcc-zsxv4\" (UID: \"8c138433-1126-4ffa-a017-19740e566084\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.586148 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx7vc\" (UniqueName: \"kubernetes.io/projected/cf795d28-4ac2-44de-9ca7-10ef8788eb80-kube-api-access-gx7vc\") pod \"telemetry-operator-controller-manager-76cc84c6bb-llc6z\" (UID: \"cf795d28-4ac2-44de-9ca7-10ef8788eb80\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.586209 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.586261 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.600748 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdl7v\" (UniqueName: \"kubernetes.io/projected/01015eb1-ac2f-4bc1-81d3-145ce402db5e-kube-api-access-bdl7v\") pod \"watcher-operator-controller-manager-769dc69bc-2dpvv\" (UID: \"01015eb1-ac2f-4bc1-81d3-145ce402db5e\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.600855 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrw5c\" (UniqueName: \"kubernetes.io/projected/aa95a5ac-c9b5-4850-8201-b696ed655570-kube-api-access-wrw5c\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.607537 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tx5x\" (UniqueName: \"kubernetes.io/projected/e55320ae-8458-4802-aa07-e406f1b58fce-kube-api-access-5tx5x\") pod \"swift-operator-controller-manager-695b4bc5dc-jxvqs\" (UID: \"e55320ae-8458-4802-aa07-e406f1b58fce\") " 
pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.621239 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgkcq\" (UniqueName: \"kubernetes.io/projected/1177803f-ea41-40ed-8b1d-58c6761363f0-kube-api-access-qgkcq\") pod \"placement-operator-controller-manager-78f8948974-j5hhn\" (UID: \"1177803f-ea41-40ed-8b1d-58c6761363f0\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.621857 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.657944 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.676855 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdl7v\" (UniqueName: \"kubernetes.io/projected/01015eb1-ac2f-4bc1-81d3-145ce402db5e-kube-api-access-bdl7v\") pod \"watcher-operator-controller-manager-769dc69bc-2dpvv\" (UID: \"01015eb1-ac2f-4bc1-81d3-145ce402db5e\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.685783 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.688272 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkh6v\" (UniqueName: \"kubernetes.io/projected/8c138433-1126-4ffa-a017-19740e566084-kube-api-access-mkh6v\") pod \"test-operator-controller-manager-5854674fcc-zsxv4\" (UID: \"8c138433-1126-4ffa-a017-19740e566084\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.689642 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx7vc\" (UniqueName: \"kubernetes.io/projected/cf795d28-4ac2-44de-9ca7-10ef8788eb80-kube-api-access-gx7vc\") pod \"telemetry-operator-controller-manager-76cc84c6bb-llc6z\" (UID: \"cf795d28-4ac2-44de-9ca7-10ef8788eb80\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.690049 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.704362 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.704412 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.704454 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrw5c\" (UniqueName: \"kubernetes.io/projected/aa95a5ac-c9b5-4850-8201-b696ed655570-kube-api-access-wrw5c\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:36 crc kubenswrapper[4685]: E1202 10:16:36.705469 4685 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 10:16:36 crc kubenswrapper[4685]: E1202 10:16:36.705518 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs podName:aa95a5ac-c9b5-4850-8201-b696ed655570 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:37.205503058 +0000 UTC m=+889.577277212 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs") pod "openstack-operator-controller-manager-59c586c68c-cvbtm" (UID: "aa95a5ac-c9b5-4850-8201-b696ed655570") : secret "metrics-server-cert" not found Dec 02 10:16:36 crc kubenswrapper[4685]: E1202 10:16:36.705574 4685 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 10:16:36 crc kubenswrapper[4685]: E1202 10:16:36.705596 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs podName:aa95a5ac-c9b5-4850-8201-b696ed655570 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:37.205590331 +0000 UTC m=+889.577364485 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs") pod "openstack-operator-controller-manager-59c586c68c-cvbtm" (UID: "aa95a5ac-c9b5-4850-8201-b696ed655570") : secret "webhook-server-cert" not found Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.711666 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.746419 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.768053 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrw5c\" (UniqueName: \"kubernetes.io/projected/aa95a5ac-c9b5-4850-8201-b696ed655570-kube-api-access-wrw5c\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.875393 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.897912 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.899155 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.904845 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-t9z58" Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.909749 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:36 crc kubenswrapper[4685]: E1202 10:16:36.910012 4685 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:36 crc kubenswrapper[4685]: E1202 10:16:36.910077 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert podName:50904294-59b3-4a71-84f9-8f171cad02e2 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:37.910058313 +0000 UTC m=+890.281832467 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" (UID: "50904294-59b3-4a71-84f9-8f171cad02e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.950393 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545"] Dec 02 10:16:36 crc kubenswrapper[4685]: I1202 10:16:36.966102 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml"] Dec 02 10:16:36 crc kubenswrapper[4685]: W1202 10:16:36.998695 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod901441b3_91d9_4edf_8955_cd5514589dec.slice/crio-0a989e92ae6c660eebb45b7bc105f23a6c6774560a1a6febba1b8d3b2c865e89 WatchSource:0}: Error finding container 0a989e92ae6c660eebb45b7bc105f23a6c6774560a1a6febba1b8d3b2c865e89: Status 404 returned error can't find the container with id 0a989e92ae6c660eebb45b7bc105f23a6c6774560a1a6febba1b8d3b2c865e89 Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.016113 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2fgz\" (UniqueName: \"kubernetes.io/projected/226039ae-3d4c-41e0-8a24-985eb9f63d27-kube-api-access-l2fgz\") pod \"rabbitmq-cluster-operator-manager-668c99d594-5pwml\" (UID: \"226039ae-3d4c-41e0-8a24-985eb9f63d27\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.085192 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp"] Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.133292 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2fgz\" (UniqueName: \"kubernetes.io/projected/226039ae-3d4c-41e0-8a24-985eb9f63d27-kube-api-access-l2fgz\") pod \"rabbitmq-cluster-operator-manager-668c99d594-5pwml\" (UID: \"226039ae-3d4c-41e0-8a24-985eb9f63d27\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.187163 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2fgz\" (UniqueName: \"kubernetes.io/projected/226039ae-3d4c-41e0-8a24-985eb9f63d27-kube-api-access-l2fgz\") pod \"rabbitmq-cluster-operator-manager-668c99d594-5pwml\" (UID: \"226039ae-3d4c-41e0-8a24-985eb9f63d27\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.188375 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm"] Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.238642 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.238693 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.238837 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:37 crc kubenswrapper[4685]: E1202 10:16:37.238978 4685 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 10:16:37 crc kubenswrapper[4685]: E1202 10:16:37.239026 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs podName:aa95a5ac-c9b5-4850-8201-b696ed655570 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:38.239011879 +0000 UTC m=+890.610786033 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs") pod "openstack-operator-controller-manager-59c586c68c-cvbtm" (UID: "aa95a5ac-c9b5-4850-8201-b696ed655570") : secret "metrics-server-cert" not found Dec 02 10:16:37 crc kubenswrapper[4685]: E1202 10:16:37.239208 4685 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 10:16:37 crc kubenswrapper[4685]: E1202 10:16:37.239241 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs podName:aa95a5ac-c9b5-4850-8201-b696ed655570 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:38.239231525 +0000 UTC m=+890.611005679 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs") pod "openstack-operator-controller-manager-59c586c68c-cvbtm" (UID: "aa95a5ac-c9b5-4850-8201-b696ed655570") : secret "webhook-server-cert" not found Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.239338 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" Dec 02 10:16:37 crc kubenswrapper[4685]: E1202 10:16:37.240150 4685 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:37 crc kubenswrapper[4685]: E1202 10:16:37.240215 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert podName:36189be8-91c8-4b60-90d6-050a07ae86d3 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:39.240194512 +0000 UTC m=+891.611968656 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert") pod "infra-operator-controller-manager-57548d458d-p62sl" (UID: "36189be8-91c8-4b60-90d6-050a07ae86d3") : secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.272455 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t"] Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.290502 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k"] Dec 02 10:16:37 crc kubenswrapper[4685]: W1202 10:16:37.306408 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6513f83d_2079_477d_8976_68cb969806fe.slice/crio-51ac4ea5b5357b29cbfa09dacf76b34f448cead51a92c18bcc7c6c8f311110a1 WatchSource:0}: Error finding container 51ac4ea5b5357b29cbfa09dacf76b34f448cead51a92c18bcc7c6c8f311110a1: Status 404 returned error can't find the container with id 51ac4ea5b5357b29cbfa09dacf76b34f448cead51a92c18bcc7c6c8f311110a1 Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.349470 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl"] Dec 02 10:16:37 crc kubenswrapper[4685]: W1202 10:16:37.391926 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod544b6fe9_890e_4b17_8f8e_55f53d64fcf7.slice/crio-35be4a5ae662160510e016304760514320913ede06588fb5053f5ea3193e8bf6 WatchSource:0}: Error finding container 35be4a5ae662160510e016304760514320913ede06588fb5053f5ea3193e8bf6: Status 404 returned error can't find the container with id 35be4a5ae662160510e016304760514320913ede06588fb5053f5ea3193e8bf6 Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.720651 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq"] Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.729748 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8"] Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.766730 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" event={"ID":"739ae795-6209-4881-8bf6-be391a810a86","Type":"ContainerStarted","Data":"3375fe5f51c69c31eddacb19d418b01d45786f4d85e52786c77b09746abe0659"} Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.783304 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" event={"ID":"6513f83d-2079-477d-8976-68cb969806fe","Type":"ContainerStarted","Data":"51ac4ea5b5357b29cbfa09dacf76b34f448cead51a92c18bcc7c6c8f311110a1"} Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.785264 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" event={"ID":"544b6fe9-890e-4b17-8f8e-55f53d64fcf7","Type":"ContainerStarted","Data":"35be4a5ae662160510e016304760514320913ede06588fb5053f5ea3193e8bf6"} Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.786508 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" event={"ID":"dd4f4fd1-38b6-4732-bab1-96a522d34e53","Type":"ContainerStarted","Data":"bf6f3785d9d127118516b789c72a089e513fd528dfc4a950284acd6d960739f0"} Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.787742 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" event={"ID":"0a4bb15b-4ada-4698-9747-dfa600f319d3","Type":"ContainerStarted","Data":"b92a93e8d82d734f9e339e192e3ba570ac153247c4a0fb361a734bdac94eefaf"} Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.792158 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq"] Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.805166 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl"] Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.805382 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" event={"ID":"b16756f6-29e0-4e33-8e00-f7b0e193b958","Type":"ContainerStarted","Data":"3f0465f6566d6137283ba361c25a81b93ba6c47375f2c65e5c17477dda7ddb58"} Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.807837 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" event={"ID":"cccf3baf-f063-4962-8856-c80c78439b82","Type":"ContainerStarted","Data":"fbfbd4ef1df1eb595ce8ff65349873f16555eafe74c63bceeda2515c22c0670f"} Dec 02 10:16:37 crc kubenswrapper[4685]: W1202 10:16:37.808604 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb0c5cd2_9459_4e31_8613_f758d330dce2.slice/crio-6547bae3b165864d910faa33d56ec18fb64cec0eea327e0b76133daaf4b68194 WatchSource:0}: Error finding container 6547bae3b165864d910faa33d56ec18fb64cec0eea327e0b76133daaf4b68194: Status 404 returned error can't find the container with id 6547bae3b165864d910faa33d56ec18fb64cec0eea327e0b76133daaf4b68194 Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.810858 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" event={"ID":"901441b3-91d9-4edf-8955-cd5514589dec","Type":"ContainerStarted","Data":"0a989e92ae6c660eebb45b7bc105f23a6c6774560a1a6febba1b8d3b2c865e89"} Dec 02 10:16:37 crc kubenswrapper[4685]: I1202 10:16:37.959520 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:37 crc kubenswrapper[4685]: E1202 10:16:37.959700 4685 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:37 crc kubenswrapper[4685]: E1202 10:16:37.959833 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert podName:50904294-59b3-4a71-84f9-8f171cad02e2 nodeName:}" failed. 
No retries permitted until 2025-12-02 10:16:39.959812047 +0000 UTC m=+892.331586201 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" (UID: "50904294-59b3-4a71-84f9-8f171cad02e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.134221 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk"] Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.149134 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z"] Dec 02 10:16:38 crc kubenswrapper[4685]: W1202 10:16:38.229414 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c138433_1126_4ffa_a017_19740e566084.slice/crio-8fe999e1b497966980dcd6b1b659fbf8e3f1d4c567e844694685047cb3588224 WatchSource:0}: Error finding container 8fe999e1b497966980dcd6b1b659fbf8e3f1d4c567e844694685047cb3588224: Status 404 returned error can't find the container with id 8fe999e1b497966980dcd6b1b659fbf8e3f1d4c567e844694685047cb3588224 Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.259839 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4"] Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.267912 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.267961 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.268174 4685 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.268212 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs podName:aa95a5ac-c9b5-4850-8201-b696ed655570 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:40.268198952 +0000 UTC m=+892.639973106 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs") pod "openstack-operator-controller-manager-59c586c68c-cvbtm" (UID: "aa95a5ac-c9b5-4850-8201-b696ed655570") : secret "webhook-server-cert" not found Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.268246 4685 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.268263 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs podName:aa95a5ac-c9b5-4850-8201-b696ed655570 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:40.268257474 +0000 UTC m=+892.640031628 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs") pod "openstack-operator-controller-manager-59c586c68c-cvbtm" (UID: "aa95a5ac-c9b5-4850-8201-b696ed655570") : secret "metrics-server-cert" not found Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.287674 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qgkcq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
placement-operator-controller-manager-78f8948974-j5hhn_openstack-operators(1177803f-ea41-40ed-8b1d-58c6761363f0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.290287 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qgkcq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-j5hhn_openstack-operators(1177803f-ea41-40ed-8b1d-58c6761363f0): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.292172 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" podUID="1177803f-ea41-40ed-8b1d-58c6761363f0" Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.296699 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn"] Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.304793 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq"] Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.325397 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6"] Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.401212 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs"] Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.412366 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv"] Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.422591 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml"] Dec 02 10:16:38 crc kubenswrapper[4685]: W1202 
10:16:38.426844 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode55320ae_8458_4802_aa07_e406f1b58fce.slice/crio-3f026bdfe3693506693f35142672d653a6f53946bb8e7a78f8156a4b9a68c4ee WatchSource:0}: Error finding container 3f026bdfe3693506693f35142672d653a6f53946bb8e7a78f8156a4b9a68c4ee: Status 404 returned error can't find the container with id 3f026bdfe3693506693f35142672d653a6f53946bb8e7a78f8156a4b9a68c4ee Dec 02 10:16:38 crc kubenswrapper[4685]: W1202 10:16:38.428848 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01015eb1_ac2f_4bc1_81d3_145ce402db5e.slice/crio-4a92a0b0c93b3affd5b8fe41027d1f34e493c19057577510326a8fd8a344fc3b WatchSource:0}: Error finding container 4a92a0b0c93b3affd5b8fe41027d1f34e493c19057577510326a8fd8a344fc3b: Status 404 returned error can't find the container with id 4a92a0b0c93b3affd5b8fe41027d1f34e493c19057577510326a8fd8a344fc3b Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.435741 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bdl7v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-2dpvv_openstack-operators(01015eb1-ac2f-4bc1-81d3-145ce402db5e): ErrImagePull: pull QPS exceeded" 
logger="UnhandledError" Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.440876 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bdl7v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-2dpvv_openstack-operators(01015eb1-ac2f-4bc1-81d3-145ce402db5e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.442007 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" podUID="01015eb1-ac2f-4bc1-81d3-145ce402db5e" Dec 02 10:16:38 crc kubenswrapper[4685]: W1202 10:16:38.443716 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod226039ae_3d4c_41e0_8a24_985eb9f63d27.slice/crio-04848d84b3fed1ff2c3a9790f12b51487799e17a2b616204ca711494bdd7842c WatchSource:0}: Error finding container 04848d84b3fed1ff2c3a9790f12b51487799e17a2b616204ca711494bdd7842c: Status 404 returned error can't find the container with id 04848d84b3fed1ff2c3a9790f12b51487799e17a2b616204ca711494bdd7842c Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.462518 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l2fgz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-5pwml_openstack-operators(226039ae-3d4c-41e0-8a24-985eb9f63d27): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.464277 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" podUID="226039ae-3d4c-41e0-8a24-985eb9f63d27" Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.822991 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" event={"ID":"30be1eaf-4d63-4fb6-9372-0857432b6b73","Type":"ContainerStarted","Data":"b8ad6a5c7c5188a44b944865c508308600b69818e700f7bba13d173c442b06d7"} Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.824278 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" event={"ID":"e55320ae-8458-4802-aa07-e406f1b58fce","Type":"ContainerStarted","Data":"3f026bdfe3693506693f35142672d653a6f53946bb8e7a78f8156a4b9a68c4ee"} Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.825199 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" event={"ID":"8c138433-1126-4ffa-a017-19740e566084","Type":"ContainerStarted","Data":"8fe999e1b497966980dcd6b1b659fbf8e3f1d4c567e844694685047cb3588224"} Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.827724 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" event={"ID":"cf795d28-4ac2-44de-9ca7-10ef8788eb80","Type":"ContainerStarted","Data":"f2966c33e656ade8e147aed5272d8034c95ade1d65fb14de9761e381b7ebf897"} Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.828452 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" event={"ID":"bb0c5cd2-9459-4e31-8613-f758d330dce2","Type":"ContainerStarted","Data":"6547bae3b165864d910faa33d56ec18fb64cec0eea327e0b76133daaf4b68194"} Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.829630 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" event={"ID":"1177803f-ea41-40ed-8b1d-58c6761363f0","Type":"ContainerStarted","Data":"c459f35fc668a0c97b84bb61aa3fd6f45e5cf36388be0f6b13bd7431d81051d5"} Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.831222 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" event={"ID":"bc4b6647-31ea-45d0-ac59-b8b1cef80aeb","Type":"ContainerStarted","Data":"ad46717760e1bb5b880780ee7e4f9a86124b122481cd067422b1a676b57a28ea"} Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.832949 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" podUID="1177803f-ea41-40ed-8b1d-58c6761363f0" Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.833235 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" event={"ID":"01015eb1-ac2f-4bc1-81d3-145ce402db5e","Type":"ContainerStarted","Data":"4a92a0b0c93b3affd5b8fe41027d1f34e493c19057577510326a8fd8a344fc3b"} Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.834891 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" event={"ID":"20b551a4-85bc-4ecb-b502-08f844a6b911","Type":"ContainerStarted","Data":"ea3e463ece62ba9f585eaa9afed257fdb29192821bf48a7f6b0be5ef4f89c9b1"} Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.835090 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" podUID="01015eb1-ac2f-4bc1-81d3-145ce402db5e" Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.840703 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" 
event={"ID":"226039ae-3d4c-41e0-8a24-985eb9f63d27","Type":"ContainerStarted","Data":"04848d84b3fed1ff2c3a9790f12b51487799e17a2b616204ca711494bdd7842c"} Dec 02 10:16:38 crc kubenswrapper[4685]: E1202 10:16:38.842842 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" podUID="226039ae-3d4c-41e0-8a24-985eb9f63d27" Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.843802 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" event={"ID":"e88f923e-f7cc-4292-a49c-483be1e7848e","Type":"ContainerStarted","Data":"b44d732096c30c24098f269da86a3fcddc7e01ab07816162cd897be274ee8788"} Dec 02 10:16:38 crc kubenswrapper[4685]: I1202 10:16:38.844799 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" event={"ID":"59c1b39b-2153-4c88-9229-0e951b086fdd","Type":"ContainerStarted","Data":"b3d954029463b8e33737f93d415ef390187fb14e7642787495463816d85a766e"} Dec 02 10:16:39 crc kubenswrapper[4685]: I1202 10:16:39.289959 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:39 crc kubenswrapper[4685]: E1202 10:16:39.290141 4685 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:39 crc kubenswrapper[4685]: E1202 10:16:39.290198 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert podName:36189be8-91c8-4b60-90d6-050a07ae86d3 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:43.290176975 +0000 UTC m=+895.661951129 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert") pod "infra-operator-controller-manager-57548d458d-p62sl" (UID: "36189be8-91c8-4b60-90d6-050a07ae86d3") : secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:39 crc kubenswrapper[4685]: E1202 10:16:39.854403 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" podUID="226039ae-3d4c-41e0-8a24-985eb9f63d27" Dec 02 10:16:39 crc kubenswrapper[4685]: E1202 10:16:39.854662 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" podUID="1177803f-ea41-40ed-8b1d-58c6761363f0" Dec 02 10:16:39 crc kubenswrapper[4685]: E1202 10:16:39.855011 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" podUID="01015eb1-ac2f-4bc1-81d3-145ce402db5e" Dec 02 10:16:40 crc kubenswrapper[4685]: I1202 10:16:40.002587 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:40 crc kubenswrapper[4685]: E1202 10:16:40.002780 4685 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:40 crc kubenswrapper[4685]: E1202 10:16:40.003013 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert podName:50904294-59b3-4a71-84f9-8f171cad02e2 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:44.00299246 +0000 UTC m=+896.374766614 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" (UID: "50904294-59b3-4a71-84f9-8f171cad02e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:40 crc kubenswrapper[4685]: I1202 10:16:40.306837 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:40 crc kubenswrapper[4685]: I1202 10:16:40.306908 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:40 crc kubenswrapper[4685]: E1202 10:16:40.307157 4685 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 10:16:40 crc kubenswrapper[4685]: E1202 10:16:40.307216 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs podName:aa95a5ac-c9b5-4850-8201-b696ed655570 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:44.30719871 +0000 UTC m=+896.678972864 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs") pod "openstack-operator-controller-manager-59c586c68c-cvbtm" (UID: "aa95a5ac-c9b5-4850-8201-b696ed655570") : secret "webhook-server-cert" not found Dec 02 10:16:40 crc kubenswrapper[4685]: E1202 10:16:40.307629 4685 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 10:16:40 crc kubenswrapper[4685]: E1202 10:16:40.307664 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs podName:aa95a5ac-c9b5-4850-8201-b696ed655570 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:44.307653712 +0000 UTC m=+896.679427876 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs") pod "openstack-operator-controller-manager-59c586c68c-cvbtm" (UID: "aa95a5ac-c9b5-4850-8201-b696ed655570") : secret "metrics-server-cert" not found Dec 02 10:16:42 crc kubenswrapper[4685]: I1202 10:16:42.148249 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:16:42 crc kubenswrapper[4685]: I1202 10:16:42.148642 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:16:43 crc kubenswrapper[4685]: I1202 10:16:43.373205 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:43 crc kubenswrapper[4685]: E1202 10:16:43.373357 4685 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:43 crc kubenswrapper[4685]: E1202 10:16:43.373528 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert podName:36189be8-91c8-4b60-90d6-050a07ae86d3 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:51.373508296 +0000 UTC m=+903.745282450 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert") pod "infra-operator-controller-manager-57548d458d-p62sl" (UID: "36189be8-91c8-4b60-90d6-050a07ae86d3") : secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:44 crc kubenswrapper[4685]: I1202 10:16:44.085922 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:44 crc kubenswrapper[4685]: E1202 10:16:44.086197 4685 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:44 crc kubenswrapper[4685]: E1202 10:16:44.086265 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert podName:50904294-59b3-4a71-84f9-8f171cad02e2 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:52.08624634 +0000 UTC m=+904.458020494 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" (UID: "50904294-59b3-4a71-84f9-8f171cad02e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:44 crc kubenswrapper[4685]: I1202 10:16:44.390081 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:44 crc kubenswrapper[4685]: I1202 10:16:44.390143 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:44 crc kubenswrapper[4685]: E1202 10:16:44.390370 4685 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 02 10:16:44 crc kubenswrapper[4685]: E1202 10:16:44.390429 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs podName:aa95a5ac-c9b5-4850-8201-b696ed655570 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:52.390410627 +0000 UTC m=+904.762184781 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs") pod "openstack-operator-controller-manager-59c586c68c-cvbtm" (UID: "aa95a5ac-c9b5-4850-8201-b696ed655570") : secret "webhook-server-cert" not found Dec 02 10:16:44 crc kubenswrapper[4685]: E1202 10:16:44.390719 4685 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 02 10:16:44 crc kubenswrapper[4685]: E1202 10:16:44.390752 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs podName:aa95a5ac-c9b5-4850-8201-b696ed655570 nodeName:}" failed. No retries permitted until 2025-12-02 10:16:52.390743606 +0000 UTC m=+904.762517760 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs") pod "openstack-operator-controller-manager-59c586c68c-cvbtm" (UID: "aa95a5ac-c9b5-4850-8201-b696ed655570") : secret "metrics-server-cert" not found Dec 02 10:16:51 crc kubenswrapper[4685]: I1202 10:16:51.398295 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:16:51 crc kubenswrapper[4685]: E1202 10:16:51.398498 4685 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:51 crc kubenswrapper[4685]: E1202 10:16:51.399032 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert podName:36189be8-91c8-4b60-90d6-050a07ae86d3 nodeName:}" failed. No retries permitted until 2025-12-02 10:17:07.399012065 +0000 UTC m=+919.770786219 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert") pod "infra-operator-controller-manager-57548d458d-p62sl" (UID: "36189be8-91c8-4b60-90d6-050a07ae86d3") : secret "infra-operator-webhook-server-cert" not found Dec 02 10:16:52 crc kubenswrapper[4685]: I1202 10:16:52.107301 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:16:52 crc kubenswrapper[4685]: E1202 10:16:52.107470 4685 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:52 crc kubenswrapper[4685]: E1202 10:16:52.107545 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert podName:50904294-59b3-4a71-84f9-8f171cad02e2 nodeName:}" failed. No retries permitted until 2025-12-02 10:17:08.107526081 +0000 UTC m=+920.479300245 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" (UID: "50904294-59b3-4a71-84f9-8f171cad02e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 02 10:16:52 crc kubenswrapper[4685]: I1202 10:16:52.412312 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:52 crc kubenswrapper[4685]: I1202 10:16:52.412386 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:52 crc kubenswrapper[4685]: I1202 10:16:52.418608 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-webhook-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:52 crc kubenswrapper[4685]: I1202 10:16:52.419478 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aa95a5ac-c9b5-4850-8201-b696ed655570-metrics-certs\") pod \"openstack-operator-controller-manager-59c586c68c-cvbtm\" (UID: \"aa95a5ac-c9b5-4850-8201-b696ed655570\") " pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:52 crc kubenswrapper[4685]: I1202 10:16:52.608013 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-nnmst" Dec 02 10:16:52 crc kubenswrapper[4685]: I1202 10:16:52.617160 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:16:55 crc kubenswrapper[4685]: E1202 10:16:55.549014 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 02 10:16:55 crc kubenswrapper[4685]: E1202 10:16:55.549790 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d6cnk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-vz7bq_openstack-operators(bb0c5cd2-9459-4e31-8613-f758d330dce2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:16:57 crc kubenswrapper[4685]: E1202 10:16:57.035816 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 02 10:16:57 crc kubenswrapper[4685]: E1202 10:16:57.036285 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-glmk5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-njgr6_openstack-operators(30be1eaf-4d63-4fb6-9372-0857432b6b73): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:00 crc kubenswrapper[4685]: E1202 10:17:00.765207 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809" Dec 02 10:17:00 crc kubenswrapper[4685]: E1202 10:17:00.765704 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 
500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8s2vp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-vn545_openstack-operators(901441b3-91d9-4edf-8955-cd5514589dec): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:01 crc kubenswrapper[4685]: E1202 10:17:01.537147 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 02 10:17:01 crc kubenswrapper[4685]: E1202 10:17:01.537366 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q7hxl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-7qgfl_openstack-operators(544b6fe9-890e-4b17-8f8e-55f53d64fcf7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:03 crc kubenswrapper[4685]: E1202 10:17:03.435071 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 02 10:17:03 crc kubenswrapper[4685]: E1202 10:17:03.436225 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4nvdm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-xnk7k_openstack-operators(739ae795-6209-4881-8bf6-be391a810a86): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:03 crc kubenswrapper[4685]: E1202 10:17:03.510753 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.83:5001/openstack-k8s-operators/swift-operator:aba33380a9710d7813c0b0075ae9fe5836b97a6f" Dec 02 10:17:03 crc kubenswrapper[4685]: E1202 10:17:03.510805 4685 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.83:5001/openstack-k8s-operators/swift-operator:aba33380a9710d7813c0b0075ae9fe5836b97a6f" Dec 02 10:17:03 crc kubenswrapper[4685]: E1202 10:17:03.510958 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.83:5001/openstack-k8s-operators/swift-operator:aba33380a9710d7813c0b0075ae9fe5836b97a6f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5tx5x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-695b4bc5dc-jxvqs_openstack-operators(e55320ae-8458-4802-aa07-e406f1b58fce): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:04 crc kubenswrapper[4685]: E1202 10:17:04.430457 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9" Dec 02 10:17:04 crc kubenswrapper[4685]: E1202 10:17:04.430683 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m2t66,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-rngtk_openstack-operators(59c1b39b-2153-4c88-9229-0e951b086fdd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:06 crc kubenswrapper[4685]: E1202 10:17:06.927984 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94" Dec 02 10:17:06 crc kubenswrapper[4685]: E1202 10:17:06.928677 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mkh6v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-zsxv4_openstack-operators(8c138433-1126-4ffa-a017-19740e566084): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:07 crc kubenswrapper[4685]: I1202 10:17:07.464574 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:17:07 crc kubenswrapper[4685]: I1202 10:17:07.474929 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/36189be8-91c8-4b60-90d6-050a07ae86d3-cert\") pod \"infra-operator-controller-manager-57548d458d-p62sl\" (UID: \"36189be8-91c8-4b60-90d6-050a07ae86d3\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:17:07 crc kubenswrapper[4685]: I1202 10:17:07.527019 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-gxv9j" Dec 02 10:17:07 crc kubenswrapper[4685]: I1202 10:17:07.535253 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:17:08 crc kubenswrapper[4685]: E1202 10:17:08.003650 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 02 10:17:08 crc kubenswrapper[4685]: E1202 10:17:08.003848 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dtc5l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-xqspp_openstack-operators(b16756f6-29e0-4e33-8e00-f7b0e193b958): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:08 crc kubenswrapper[4685]: I1202 10:17:08.174919 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:17:08 crc kubenswrapper[4685]: I1202 10:17:08.178282 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50904294-59b3-4a71-84f9-8f171cad02e2-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd49v57k\" (UID: \"50904294-59b3-4a71-84f9-8f171cad02e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:17:08 crc kubenswrapper[4685]: I1202 10:17:08.310337 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-zc6q7" Dec 02 10:17:08 crc kubenswrapper[4685]: I1202 10:17:08.318307 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:17:08 crc kubenswrapper[4685]: E1202 10:17:08.807951 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 02 10:17:08 crc kubenswrapper[4685]: E1202 10:17:08.808148 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gfsln,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
neutron-operator-controller-manager-5fdfd5b6b5-7tmr8_openstack-operators(0a4bb15b-4ada-4698-9747-dfa600f319d3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:11 crc kubenswrapper[4685]: E1202 10:17:11.215135 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 02 10:17:11 crc kubenswrapper[4685]: E1202 10:17:11.215439 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tqrgp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-vx7jq_openstack-operators(20b551a4-85bc-4ecb-b502-08f844a6b911): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:11 crc kubenswrapper[4685]: E1202 10:17:11.702352 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85" Dec 02 10:17:11 crc kubenswrapper[4685]: E1202 10:17:11.702874 4685 kuberuntime_manager.go:1274] 
"Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:9f68d7bc8c6bce38f46dee8a8272d5365c49fe7b32b2af52e8ac884e212f3a85,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wjqgh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-9s2nf_openstack-operators(dd4f4fd1-38b6-4732-bab1-96a522d34e53): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:12 crc kubenswrapper[4685]: I1202 10:17:12.148336 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:17:12 crc kubenswrapper[4685]: I1202 10:17:12.148418 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:17:12 crc kubenswrapper[4685]: I1202 10:17:12.148476 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:17:12 crc kubenswrapper[4685]: I1202 10:17:12.149539 4685 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"655120d7a1362181f84e921b333c967e14aabc7193e6f3de840afaf3bc97e69a"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:17:12 crc kubenswrapper[4685]: I1202 10:17:12.149624 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://655120d7a1362181f84e921b333c967e14aabc7193e6f3de840afaf3bc97e69a" gracePeriod=600 Dec 02 10:17:13 crc kubenswrapper[4685]: I1202 10:17:13.130544 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="655120d7a1362181f84e921b333c967e14aabc7193e6f3de840afaf3bc97e69a" exitCode=0 Dec 02 10:17:13 crc kubenswrapper[4685]: I1202 10:17:13.130850 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"655120d7a1362181f84e921b333c967e14aabc7193e6f3de840afaf3bc97e69a"} Dec 02 10:17:13 crc kubenswrapper[4685]: I1202 10:17:13.130883 4685 scope.go:117] "RemoveContainer" containerID="3a4bcb8208e258f079f3fb221ee480a662e75e3d7ac311aacc2db0117f88c73a" Dec 02 10:17:13 crc kubenswrapper[4685]: E1202 10:17:13.175414 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621" Dec 02 10:17:13 crc kubenswrapper[4685]: E1202 10:17:13.175647 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bdl7v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-2dpvv_openstack-operators(01015eb1-ac2f-4bc1-81d3-145ce402db5e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:16 crc kubenswrapper[4685]: E1202 10:17:16.732365 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 02 10:17:16 crc kubenswrapper[4685]: E1202 10:17:16.733478 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qgkcq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-j5hhn_openstack-operators(1177803f-ea41-40ed-8b1d-58c6761363f0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:16 crc kubenswrapper[4685]: I1202 10:17:16.736519 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 10:17:20 crc kubenswrapper[4685]: E1202 10:17:20.754626 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 02 10:17:20 crc kubenswrapper[4685]: E1202 10:17:20.755317 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7n8nk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-bqctl_openstack-operators(e88f923e-f7cc-4292-a49c-483be1e7848e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:21 crc kubenswrapper[4685]: I1202 10:17:21.676628 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm"] Dec 02 10:17:24 crc kubenswrapper[4685]: E1202 10:17:24.182960 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 02 10:17:24 crc kubenswrapper[4685]: E1202 10:17:24.183420 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vzm87,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-5sn8t_openstack-operators(6513f83d-2079-477d-8976-68cb969806fe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:24 crc kubenswrapper[4685]: I1202 10:17:24.232042 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" event={"ID":"aa95a5ac-c9b5-4850-8201-b696ed655570","Type":"ContainerStarted","Data":"51b8852906aa350f74c6ad4ce927b55fbc592962e7a40787bed5f223f564f19e"} Dec 02 10:17:25 crc kubenswrapper[4685]: I1202 10:17:25.457412 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k"] Dec 02 10:17:25 crc kubenswrapper[4685]: W1202 10:17:25.675301 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod50904294_59b3_4a71_84f9_8f171cad02e2.slice/crio-26d3cda78ed92d79c8e7af85eba1f8a118174c0867b7997d74104725f905f870 WatchSource:0}: Error finding container 26d3cda78ed92d79c8e7af85eba1f8a118174c0867b7997d74104725f905f870: Status 404 returned error can't find the container with id 26d3cda78ed92d79c8e7af85eba1f8a118174c0867b7997d74104725f905f870 Dec 02 10:17:25 crc kubenswrapper[4685]: I1202 10:17:25.716974 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-p62sl"] Dec 02 10:17:25 crc kubenswrapper[4685]: W1202 10:17:25.945928 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36189be8_91c8_4b60_90d6_050a07ae86d3.slice/crio-db10a85b1f1f2c3048d12f0ef2c2580c73209a9fdf7b26c6e9cd86cc7d94ac5d WatchSource:0}: Error finding container db10a85b1f1f2c3048d12f0ef2c2580c73209a9fdf7b26c6e9cd86cc7d94ac5d: Status 404 returned error can't find the container with id db10a85b1f1f2c3048d12f0ef2c2580c73209a9fdf7b26c6e9cd86cc7d94ac5d Dec 02 10:17:26 crc kubenswrapper[4685]: E1202 10:17:26.083231 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 02 10:17:26 crc kubenswrapper[4685]: E1202 10:17:26.083433 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wjqgh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-9s2nf_openstack-operators(dd4f4fd1-38b6-4732-bab1-96a522d34e53): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 02 10:17:26 crc kubenswrapper[4685]: E1202 10:17:26.085810 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" podUID="dd4f4fd1-38b6-4732-bab1-96a522d34e53" Dec 02 10:17:26 crc kubenswrapper[4685]: I1202 10:17:26.253684 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"a4f65c3934f925bd4944d91a313fa6182d1454a9f7eaa524038fcf17bffe22c2"} Dec 02 10:17:26 crc kubenswrapper[4685]: I1202 10:17:26.256376 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" event={"ID":"50904294-59b3-4a71-84f9-8f171cad02e2","Type":"ContainerStarted","Data":"26d3cda78ed92d79c8e7af85eba1f8a118174c0867b7997d74104725f905f870"} Dec 02 10:17:26 crc kubenswrapper[4685]: I1202 10:17:26.258057 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" event={"ID":"36189be8-91c8-4b60-90d6-050a07ae86d3","Type":"ContainerStarted","Data":"db10a85b1f1f2c3048d12f0ef2c2580c73209a9fdf7b26c6e9cd86cc7d94ac5d"} Dec 02 10:17:26 crc kubenswrapper[4685]: I1202 10:17:26.260208 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" event={"ID":"cf795d28-4ac2-44de-9ca7-10ef8788eb80","Type":"ContainerStarted","Data":"738a6dbda4be16f958b4c96989f233b9aaeafb9290436dc62556c78669e8b18e"} Dec 02 10:17:26 crc kubenswrapper[4685]: I1202 10:17:26.261497 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" event={"ID":"aa95a5ac-c9b5-4850-8201-b696ed655570","Type":"ContainerStarted","Data":"04351e48b2c3a6fdd7cb80028562f6ca1a2b519391089c020e50bec4a407f01b"} Dec 02 10:17:26 crc kubenswrapper[4685]: I1202 10:17:26.261755 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:17:26 crc kubenswrapper[4685]: I1202 10:17:26.262500 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" event={"ID":"cccf3baf-f063-4962-8856-c80c78439b82","Type":"ContainerStarted","Data":"46ff6b8d100639e9a44c2a124ebbc5b69e8aa8735dfca4235604fa0fd2615b7a"} Dec 02 10:17:26 crc kubenswrapper[4685]: I1202 10:17:26.263495 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" event={"ID":"bc4b6647-31ea-45d0-ac59-b8b1cef80aeb","Type":"ContainerStarted","Data":"d23fea71b209e8b9e3c35e1331fd725f0a06e6d6b29aca480d1d494057e61935"} Dec 02 10:17:26 crc kubenswrapper[4685]: I1202 10:17:26.382456 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" podStartSLOduration=50.382438642 podStartE2EDuration="50.382438642s" podCreationTimestamp="2025-12-02 10:16:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:17:26.378192505 +0000 UTC m=+938.749966659" watchObservedRunningTime="2025-12-02 10:17:26.382438642 +0000 UTC m=+938.754212796" Dec 02 10:17:27 crc kubenswrapper[4685]: E1202 10:17:27.458632 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 02 10:17:27 crc kubenswrapper[4685]: E1202 10:17:27.459063 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l2fgz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-5pwml_openstack-operators(226039ae-3d4c-41e0-8a24-985eb9f63d27): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:27 crc kubenswrapper[4685]: E1202 10:17:27.460230 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" podUID="226039ae-3d4c-41e0-8a24-985eb9f63d27" Dec 02 10:17:29 crc kubenswrapper[4685]: E1202 10:17:29.000805 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 02 10:17:29 crc kubenswrapper[4685]: E1202 10:17:29.001315 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gfsln,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-7tmr8_openstack-operators(0a4bb15b-4ada-4698-9747-dfa600f319d3): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 02 10:17:29 crc kubenswrapper[4685]: 
E1202 10:17:29.002510 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" podUID="0a4bb15b-4ada-4698-9747-dfa600f319d3" Dec 02 10:17:30 crc kubenswrapper[4685]: I1202 10:17:30.302199 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" event={"ID":"e88f923e-f7cc-4292-a49c-483be1e7848e","Type":"ContainerStarted","Data":"30056b8f4827c7fa947ac96d3f39749f23420ce379096756b118af6c7ca140ac"} Dec 02 10:17:31 crc kubenswrapper[4685]: E1202 10:17:31.663014 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" podUID="e88f923e-f7cc-4292-a49c-483be1e7848e" Dec 02 10:17:32 crc kubenswrapper[4685]: I1202 10:17:32.317367 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" event={"ID":"1177803f-ea41-40ed-8b1d-58c6761363f0","Type":"ContainerStarted","Data":"39b7d4607cc679da6b28c86bd17e613f01be14ea38351e4c4369b0d6dc6823d3"} Dec 02 10:17:32 crc kubenswrapper[4685]: E1202 10:17:32.385858 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" podUID="1177803f-ea41-40ed-8b1d-58c6761363f0" Dec 02 10:17:32 crc kubenswrapper[4685]: I1202 10:17:32.626115 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-59c586c68c-cvbtm" Dec 02 10:17:33 crc kubenswrapper[4685]: E1202 10:17:33.235414 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 02 10:17:33 crc kubenswrapper[4685]: E1202 10:17:33.235923 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m2t66,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-rngtk_openstack-operators(59c1b39b-2153-4c88-9229-0e951b086fdd): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 02 10:17:33 crc kubenswrapper[4685]: E1202 10:17:33.237398 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"]" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" podUID="59c1b39b-2153-4c88-9229-0e951b086fdd" Dec 02 10:17:33 crc kubenswrapper[4685]: I1202 10:17:33.325123 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" event={"ID":"cf795d28-4ac2-44de-9ca7-10ef8788eb80","Type":"ContainerStarted","Data":"a706c5e70d66af01cd27b23861016a7b18954930365b7908fc31fccba1c01d6c"} Dec 02 10:17:33 crc kubenswrapper[4685]: E1202 10:17:33.326986 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" podUID="1177803f-ea41-40ed-8b1d-58c6761363f0" Dec 02 10:17:33 crc kubenswrapper[4685]: I1202 10:17:33.366362 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" podStartSLOduration=4.091643498 podStartE2EDuration="58.366342654s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:38.143603325 +0000 UTC m=+890.515377479" lastFinishedPulling="2025-12-02 10:17:32.418302481 +0000 UTC m=+944.790076635" observedRunningTime="2025-12-02 10:17:33.36620021 +0000 UTC m=+945.737974374" watchObservedRunningTime="2025-12-02 10:17:33.366342654 +0000 UTC m=+945.738116828" Dec 02 10:17:34 crc kubenswrapper[4685]: E1202 10:17:34.029136 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 02 10:17:34 crc kubenswrapper[4685]: E1202 10:17:34.029595 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d6cnk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-vz7bq_openstack-operators(bb0c5cd2-9459-4e31-8613-f758d330dce2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:17:34 crc kubenswrapper[4685]: E1202 10:17:34.030842 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" podUID="bb0c5cd2-9459-4e31-8613-f758d330dce2" Dec 02 10:17:34 crc kubenswrapper[4685]: I1202 10:17:34.334671 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" event={"ID":"dd4f4fd1-38b6-4732-bab1-96a522d34e53","Type":"ContainerStarted","Data":"04a20a625996a7502541d5abc6e5f37d3eed5e4c861f6c26d5fa14459acfcda9"} Dec 02 10:17:34 crc kubenswrapper[4685]: I1202 10:17:34.335114 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" Dec 02 10:17:34 crc kubenswrapper[4685]: I1202 10:17:34.337113 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-llc6z" Dec 02 10:17:35 crc kubenswrapper[4685]: I1202 10:17:35.342578 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" event={"ID":"bc4b6647-31ea-45d0-ac59-b8b1cef80aeb","Type":"ContainerStarted","Data":"05325b65620c4643bb2b092e91065ac505e6be6e5edea40d020f81608e49a152"} Dec 02 10:17:35 crc kubenswrapper[4685]: I1202 10:17:35.344826 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" 
event={"ID":"544b6fe9-890e-4b17-8f8e-55f53d64fcf7","Type":"ContainerStarted","Data":"dad03140d1a547cb29b2c05fefafb5c61837a1abb19ba783358dcc5f037a2b71"} Dec 02 10:17:35 crc kubenswrapper[4685]: I1202 10:17:35.347428 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" event={"ID":"dd4f4fd1-38b6-4732-bab1-96a522d34e53","Type":"ContainerStarted","Data":"c3ab40c441bab5755f9ce6e63915feb7f8be8264808990caed1aff2de64ec02c"} Dec 02 10:17:35 crc kubenswrapper[4685]: E1202 10:17:35.613209 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" podUID="544b6fe9-890e-4b17-8f8e-55f53d64fcf7" Dec 02 10:17:36 crc kubenswrapper[4685]: I1202 10:17:36.353171 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" Dec 02 10:17:36 crc kubenswrapper[4685]: I1202 10:17:36.369932 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" podStartSLOduration=4.782506609 podStartE2EDuration="1m1.369917701s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.031969165 +0000 UTC m=+889.403743319" lastFinishedPulling="2025-12-02 10:17:33.619380257 +0000 UTC m=+945.991154411" observedRunningTime="2025-12-02 10:17:36.366887197 +0000 UTC m=+948.738661371" watchObservedRunningTime="2025-12-02 10:17:36.369917701 +0000 UTC m=+948.741691855" Dec 02 10:17:36 crc kubenswrapper[4685]: I1202 10:17:36.444042 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" podStartSLOduration=4.713287941 podStartE2EDuration="1m1.444021159s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.761193396 +0000 UTC m=+890.132967550" lastFinishedPulling="2025-12-02 10:17:34.491926614 +0000 UTC m=+946.863700768" observedRunningTime="2025-12-02 10:17:36.431851612 +0000 UTC m=+948.803625766" watchObservedRunningTime="2025-12-02 10:17:36.444021159 +0000 UTC m=+948.815795313" Dec 02 10:17:40 crc kubenswrapper[4685]: E1202 10:17:40.625178 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" podUID="226039ae-3d4c-41e0-8a24-985eb9f63d27" Dec 02 10:17:45 crc kubenswrapper[4685]: I1202 10:17:45.530516 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-9s2nf" Dec 02 10:17:46 crc kubenswrapper[4685]: I1202 10:17:46.128160 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" Dec 02 10:17:46 crc kubenswrapper[4685]: I1202 10:17:46.130687 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9l2jq" Dec 02 
10:17:46 crc kubenswrapper[4685]: E1202 10:17:46.982143 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" podUID="30be1eaf-4d63-4fb6-9372-0857432b6b73" Dec 02 10:17:47 crc kubenswrapper[4685]: I1202 10:17:47.428287 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" event={"ID":"cccf3baf-f063-4962-8856-c80c78439b82","Type":"ContainerStarted","Data":"a0dace2f24ffcee2b7701f2e00fdf8fa40e203a72cf6e4b94ec51b73d740719c"} Dec 02 10:17:47 crc kubenswrapper[4685]: I1202 10:17:47.428534 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" Dec 02 10:17:47 crc kubenswrapper[4685]: I1202 10:17:47.431101 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" Dec 02 10:17:47 crc kubenswrapper[4685]: I1202 10:17:47.431150 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" event={"ID":"0a4bb15b-4ada-4698-9747-dfa600f319d3","Type":"ContainerStarted","Data":"4b8f1fcbb3cae2b36b94fa96de4aa1918e9653862458e14d309aa7f37e32bda1"} Dec 02 10:17:47 crc kubenswrapper[4685]: I1202 10:17:47.441112 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" event={"ID":"30be1eaf-4d63-4fb6-9372-0857432b6b73","Type":"ContainerStarted","Data":"889dbffd89af25fba52a7d80c0e90dc7dc949ebd9315e5f46cb12d5dec823e70"} Dec 02 10:17:47 crc kubenswrapper[4685]: I1202 10:17:47.452846 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-855qm" podStartSLOduration=4.441362325 podStartE2EDuration="1m12.452824547s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.270574975 +0000 UTC m=+889.642349119" lastFinishedPulling="2025-12-02 10:17:45.282037197 +0000 UTC m=+957.653811341" observedRunningTime="2025-12-02 10:17:47.449116124 +0000 UTC m=+959.820890278" watchObservedRunningTime="2025-12-02 10:17:47.452824547 +0000 UTC m=+959.824598701" Dec 02 10:17:48 crc kubenswrapper[4685]: I1202 10:17:48.448356 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" event={"ID":"e88f923e-f7cc-4292-a49c-483be1e7848e","Type":"ContainerStarted","Data":"24276700dfcba56b57ee06c19dd2b3385780f2a1411b95eb3ae854d6ecc11dd9"} Dec 02 10:17:48 crc kubenswrapper[4685]: I1202 10:17:48.468849 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" podStartSLOduration=6.458832305 podStartE2EDuration="1m13.468834089s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.839377755 +0000 UTC m=+890.211151909" lastFinishedPulling="2025-12-02 10:17:44.849379519 +0000 UTC m=+957.221153693" observedRunningTime="2025-12-02 10:17:48.463259435 +0000 UTC m=+960.835033589" watchObservedRunningTime="2025-12-02 10:17:48.468834089 +0000 UTC m=+960.840608243" Dec 02 10:17:48 crc kubenswrapper[4685]: E1202 
10:17:48.818463 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" podUID="01015eb1-ac2f-4bc1-81d3-145ce402db5e" Dec 02 10:17:49 crc kubenswrapper[4685]: I1202 10:17:49.455616 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" event={"ID":"01015eb1-ac2f-4bc1-81d3-145ce402db5e","Type":"ContainerStarted","Data":"f429a82831611c378f3a754df2124ceba4acd21773dff5ef85ed5bb20436ba0e"} Dec 02 10:17:49 crc kubenswrapper[4685]: I1202 10:17:49.455929 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" Dec 02 10:17:51 crc kubenswrapper[4685]: I1202 10:17:51.469414 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" event={"ID":"901441b3-91d9-4edf-8955-cd5514589dec","Type":"ContainerStarted","Data":"7f43cf920d4e5f9d1764a24a910aac53700e0e58ad8d348a81db284f876b21cd"} Dec 02 10:17:51 crc kubenswrapper[4685]: I1202 10:17:51.471263 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" event={"ID":"8c138433-1126-4ffa-a017-19740e566084","Type":"ContainerStarted","Data":"965981be09ac27f7467777264f3bc66b60bc2349b1f1f2507a68d348b13d933b"} Dec 02 10:17:51 crc kubenswrapper[4685]: I1202 10:17:51.472746 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" event={"ID":"bb0c5cd2-9459-4e31-8613-f758d330dce2","Type":"ContainerStarted","Data":"acbddcf6dcb7129857dda8078240f27b1eb27ba2b17fefd8c715c5e905f8973c"} Dec 02 10:17:51 crc kubenswrapper[4685]: E1202 10:17:51.539669 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" podUID="8c138433-1126-4ffa-a017-19740e566084" Dec 02 10:17:51 crc kubenswrapper[4685]: E1202 10:17:51.539812 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" podUID="b16756f6-29e0-4e33-8e00-f7b0e193b958" Dec 02 10:17:51 crc kubenswrapper[4685]: E1202 10:17:51.549788 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" podUID="901441b3-91d9-4edf-8955-cd5514589dec" Dec 02 10:17:51 crc kubenswrapper[4685]: E1202 10:17:51.580035 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" podUID="e55320ae-8458-4802-aa07-e406f1b58fce" Dec 02 10:17:51 crc kubenswrapper[4685]: E1202 10:17:51.596181 4685 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" podUID="739ae795-6209-4881-8bf6-be391a810a86" Dec 02 10:17:51 crc kubenswrapper[4685]: E1202 10:17:51.710449 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" podUID="20b551a4-85bc-4ecb-b502-08f844a6b911" Dec 02 10:17:51 crc kubenswrapper[4685]: E1202 10:17:51.710655 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" podUID="6513f83d-2079-477d-8976-68cb969806fe" Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.495987 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" event={"ID":"30be1eaf-4d63-4fb6-9372-0857432b6b73","Type":"ContainerStarted","Data":"fad5c1ae2bdfa64c515928f8d101ff468d25cdde1adb6794e1a69e5ce3551ee8"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.496617 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.511187 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" event={"ID":"b16756f6-29e0-4e33-8e00-f7b0e193b958","Type":"ContainerStarted","Data":"0d837de185e7ec13241e15acd276c8e019f84629941ae95b61267af9d1ad3a6c"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.518743 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" event={"ID":"e55320ae-8458-4802-aa07-e406f1b58fce","Type":"ContainerStarted","Data":"4f8e44a5e63513e75de94e7d1799db070477f9cee814df9f794ec7c74d2b2741"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.521772 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" event={"ID":"59c1b39b-2153-4c88-9229-0e951b086fdd","Type":"ContainerStarted","Data":"96eb45361977ed5540af94c1b3ce1de2c1179dc15f7c79678f34f0563f31d16c"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.523499 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" event={"ID":"1177803f-ea41-40ed-8b1d-58c6761363f0","Type":"ContainerStarted","Data":"4ba1130b65cddfe0e1e8e949fa08a2ef603bd01499d067464e67ff57bf911649"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.524273 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.531694 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" 
event={"ID":"6513f83d-2079-477d-8976-68cb969806fe","Type":"ContainerStarted","Data":"08f5a40f03c5d61edda64c53ffc988ff340f7673b3f5f8d5ef2935ac2bbf2465"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.549698 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" event={"ID":"544b6fe9-890e-4b17-8f8e-55f53d64fcf7","Type":"ContainerStarted","Data":"5e78a802aee7dcf9556811f2421311ba14b01e6a4f5cb0f0920b882bd7fbd67c"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.550436 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.551388 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" podStartSLOduration=6.949127543 podStartE2EDuration="1m17.551370528s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:38.287301012 +0000 UTC m=+890.659075166" lastFinishedPulling="2025-12-02 10:17:48.889543997 +0000 UTC m=+961.261318151" observedRunningTime="2025-12-02 10:17:52.546899065 +0000 UTC m=+964.918673209" watchObservedRunningTime="2025-12-02 10:17:52.551370528 +0000 UTC m=+964.923144682" Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.566876 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" event={"ID":"0a4bb15b-4ada-4698-9747-dfa600f319d3","Type":"ContainerStarted","Data":"3445a94cc0d85acf5825000115636714575d12419e159d4a1e13f7a231c2dd29"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.567537 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.570820 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.574459 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" event={"ID":"20b551a4-85bc-4ecb-b502-08f844a6b911","Type":"ContainerStarted","Data":"569db407c608932f666fb9451f715b8e677975de709a22c088d6969ccc4f07b2"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.590290 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" event={"ID":"36189be8-91c8-4b60-90d6-050a07ae86d3","Type":"ContainerStarted","Data":"24d91656d3ced33a941a1a6ff595156693e7fe8db70b2e1c5541a484918a852d"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.591315 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" event={"ID":"739ae795-6209-4881-8bf6-be391a810a86","Type":"ContainerStarted","Data":"510677ad19be7e0a0b5032706b128072cbb6bb1352668a477d2bf15b7c5c5de1"} Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.604762 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" event={"ID":"50904294-59b3-4a71-84f9-8f171cad02e2","Type":"ContainerStarted","Data":"49b332f2e61c6ad137117a4175ca9e3d36a3e74fe7767de0a987bfdcda6e580c"} Dec 02 
10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.629530 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" podStartSLOduration=7.044003185 podStartE2EDuration="1m17.629504388s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:38.287530818 +0000 UTC m=+890.659304972" lastFinishedPulling="2025-12-02 10:17:48.873032021 +0000 UTC m=+961.244806175" observedRunningTime="2025-12-02 10:17:52.622771392 +0000 UTC m=+964.994545546" watchObservedRunningTime="2025-12-02 10:17:52.629504388 +0000 UTC m=+965.001278542" Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.709549 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" podStartSLOduration=6.798809749 podStartE2EDuration="1m17.70953187s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.419460906 +0000 UTC m=+889.791235060" lastFinishedPulling="2025-12-02 10:17:48.330183037 +0000 UTC m=+960.701957181" observedRunningTime="2025-12-02 10:17:52.705439306 +0000 UTC m=+965.077213460" watchObservedRunningTime="2025-12-02 10:17:52.70953187 +0000 UTC m=+965.081306024" Dec 02 10:17:52 crc kubenswrapper[4685]: I1202 10:17:52.848891 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7tmr8" podStartSLOduration=10.760409852 podStartE2EDuration="1m17.848872672s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.760891108 +0000 UTC m=+890.132665262" lastFinishedPulling="2025-12-02 10:17:44.849353928 +0000 UTC m=+957.221128082" observedRunningTime="2025-12-02 10:17:52.844671005 +0000 UTC m=+965.216445159" watchObservedRunningTime="2025-12-02 10:17:52.848872672 +0000 UTC m=+965.220646826" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.616040 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" event={"ID":"01015eb1-ac2f-4bc1-81d3-145ce402db5e","Type":"ContainerStarted","Data":"2b01748b656e7a1d1f58a1554958c48fbaaf46847030102e2e2cf5ca784ede85"} Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.616964 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.618372 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" event={"ID":"36189be8-91c8-4b60-90d6-050a07ae86d3","Type":"ContainerStarted","Data":"145df74093ea3e608631766f2f5206571098feae75112f11e1a2a67bbe2a1e7a"} Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.619214 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.621557 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" event={"ID":"bb0c5cd2-9459-4e31-8613-f758d330dce2","Type":"ContainerStarted","Data":"234f85f48af0c15c1b18f14cace3aef4b48440408df8645c05ed0d05e488d96d"} Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.621750 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.623774 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" event={"ID":"e55320ae-8458-4802-aa07-e406f1b58fce","Type":"ContainerStarted","Data":"a1bc32ceb3b84f5cf407e67baea53ed5a452bcd4679b1a993661415808c444bc"} Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.623921 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.628946 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" event={"ID":"59c1b39b-2153-4c88-9229-0e951b086fdd","Type":"ContainerStarted","Data":"da8e121163190cde299f895c84fd50feac6fdf653ee60daf4757a17dbbcbcd23"} Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.629087 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.631826 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" event={"ID":"50904294-59b3-4a71-84f9-8f171cad02e2","Type":"ContainerStarted","Data":"7bcf9751ed50def7e2dca459e7b760b0b8bcc07526b50efe2024858fc32ef39c"} Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.632411 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.645934 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" podStartSLOduration=4.594075885 podStartE2EDuration="1m18.64589447s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:38.435546605 +0000 UTC m=+890.807320759" lastFinishedPulling="2025-12-02 10:17:52.48736519 +0000 UTC m=+964.859139344" observedRunningTime="2025-12-02 10:17:53.642554098 +0000 UTC m=+966.014328272" watchObservedRunningTime="2025-12-02 10:17:53.64589447 +0000 UTC m=+966.017668624" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.687098 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" podStartSLOduration=8.135959979 podStartE2EDuration="1m18.687078039s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:38.119591099 +0000 UTC m=+890.491365253" lastFinishedPulling="2025-12-02 10:17:48.670709159 +0000 UTC m=+961.042483313" observedRunningTime="2025-12-02 10:17:53.676857446 +0000 UTC m=+966.048631600" watchObservedRunningTime="2025-12-02 10:17:53.687078039 +0000 UTC m=+966.058852193" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.843790 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" podStartSLOduration=8.008913777 podStartE2EDuration="1m18.843771519s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.835895248 +0000 UTC m=+890.207669402" lastFinishedPulling="2025-12-02 
10:17:48.67075299 +0000 UTC m=+961.042527144" observedRunningTime="2025-12-02 10:17:53.841701282 +0000 UTC m=+966.213475446" watchObservedRunningTime="2025-12-02 10:17:53.843771519 +0000 UTC m=+966.215545683" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.848962 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" podStartSLOduration=4.078894166 podStartE2EDuration="1m18.848947753s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:38.429905648 +0000 UTC m=+890.801679802" lastFinishedPulling="2025-12-02 10:17:53.199959235 +0000 UTC m=+965.571733389" observedRunningTime="2025-12-02 10:17:53.756203129 +0000 UTC m=+966.127977283" watchObservedRunningTime="2025-12-02 10:17:53.848947753 +0000 UTC m=+966.220721907" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.969769 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" podStartSLOduration=56.311443028 podStartE2EDuration="1m18.969753112s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:17:25.692838792 +0000 UTC m=+938.064612946" lastFinishedPulling="2025-12-02 10:17:48.351148866 +0000 UTC m=+960.722923030" observedRunningTime="2025-12-02 10:17:53.966358177 +0000 UTC m=+966.338132331" watchObservedRunningTime="2025-12-02 10:17:53.969753112 +0000 UTC m=+966.341527266" Dec 02 10:17:53 crc kubenswrapper[4685]: I1202 10:17:53.970877 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" podStartSLOduration=56.569026216 podStartE2EDuration="1m18.970871172s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:17:25.951006798 +0000 UTC m=+938.322780952" lastFinishedPulling="2025-12-02 10:17:48.352851754 +0000 UTC m=+960.724625908" observedRunningTime="2025-12-02 10:17:53.887867558 +0000 UTC m=+966.259641712" watchObservedRunningTime="2025-12-02 10:17:53.970871172 +0000 UTC m=+966.342645326" Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.650811 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" event={"ID":"b16756f6-29e0-4e33-8e00-f7b0e193b958","Type":"ContainerStarted","Data":"516aae5466a0f429af4874abdc83103683131ce6442957680a33e112c42f8c17"} Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.651231 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.654880 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" event={"ID":"901441b3-91d9-4edf-8955-cd5514589dec","Type":"ContainerStarted","Data":"5cd9f3ee899911558aed14f5fa0a87a0255f848b2b362f2f2f68e0f8c16ecd78"} Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.655545 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.659383 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" 
event={"ID":"739ae795-6209-4881-8bf6-be391a810a86","Type":"ContainerStarted","Data":"8426e374a8dd5d1bc74a1c91febf8251030daad860e456722b9a01c9a9ac7d08"} Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.659684 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.661740 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" event={"ID":"20b551a4-85bc-4ecb-b502-08f844a6b911","Type":"ContainerStarted","Data":"7346859202f5828ac2c3998193955793fc123570b78211425ddfbc8116bb3f1d"} Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.662254 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.665035 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" event={"ID":"8c138433-1126-4ffa-a017-19740e566084","Type":"ContainerStarted","Data":"25bdbdce8f34e88670d0ef06779561c5f155e1fe63ec94a97f5b9192e634f278"} Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.665597 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.683918 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" podStartSLOduration=4.147919844 podStartE2EDuration="1m20.68388339s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.166535969 +0000 UTC m=+889.538310123" lastFinishedPulling="2025-12-02 10:17:53.702499515 +0000 UTC m=+966.074273669" observedRunningTime="2025-12-02 10:17:55.67995356 +0000 UTC m=+968.051727714" watchObservedRunningTime="2025-12-02 10:17:55.68388339 +0000 UTC m=+968.055657544" Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.763991 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" podStartSLOduration=2.873629235 podStartE2EDuration="1m20.763972773s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.014828599 +0000 UTC m=+889.386602753" lastFinishedPulling="2025-12-02 10:17:54.905172127 +0000 UTC m=+967.276946291" observedRunningTime="2025-12-02 10:17:55.700502908 +0000 UTC m=+968.072277062" watchObservedRunningTime="2025-12-02 10:17:55.763972773 +0000 UTC m=+968.135746927" Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.781210 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" podStartSLOduration=4.492162375 podStartE2EDuration="1m20.781194529s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.419498897 +0000 UTC m=+889.791273051" lastFinishedPulling="2025-12-02 10:17:53.708531051 +0000 UTC m=+966.080305205" observedRunningTime="2025-12-02 10:17:55.77977341 +0000 UTC m=+968.151547564" watchObservedRunningTime="2025-12-02 10:17:55.781194529 +0000 UTC m=+968.152968683" Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.785456 4685 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" podStartSLOduration=5.345450047 podStartE2EDuration="1m20.785446407s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:38.265442896 +0000 UTC m=+890.637217050" lastFinishedPulling="2025-12-02 10:17:53.705439256 +0000 UTC m=+966.077213410" observedRunningTime="2025-12-02 10:17:55.764365303 +0000 UTC m=+968.136139457" watchObservedRunningTime="2025-12-02 10:17:55.785446407 +0000 UTC m=+968.157220561" Dec 02 10:17:55 crc kubenswrapper[4685]: I1202 10:17:55.804159 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" podStartSLOduration=4.187181663 podStartE2EDuration="1m20.804143183s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:38.286753446 +0000 UTC m=+890.658527600" lastFinishedPulling="2025-12-02 10:17:54.903714966 +0000 UTC m=+967.275489120" observedRunningTime="2025-12-02 10:17:55.80040777 +0000 UTC m=+968.172181954" watchObservedRunningTime="2025-12-02 10:17:55.804143183 +0000 UTC m=+968.175917337" Dec 02 10:17:56 crc kubenswrapper[4685]: I1202 10:17:56.075150 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-vz7bq" Dec 02 10:17:56 crc kubenswrapper[4685]: I1202 10:17:56.277322 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bqctl" Dec 02 10:17:56 crc kubenswrapper[4685]: I1202 10:17:56.309166 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-rngtk" Dec 02 10:17:56 crc kubenswrapper[4685]: I1202 10:17:56.556120 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-njgr6" Dec 02 10:17:56 crc kubenswrapper[4685]: I1202 10:17:56.661993 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-j5hhn" Dec 02 10:17:56 crc kubenswrapper[4685]: I1202 10:17:56.671643 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" event={"ID":"6513f83d-2079-477d-8976-68cb969806fe","Type":"ContainerStarted","Data":"00f56e88ad86587efe3b35a33f490c2826ebe4090a6da7c0c06d1c02a57eb3ba"} Dec 02 10:17:56 crc kubenswrapper[4685]: I1202 10:17:56.673243 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" event={"ID":"226039ae-3d4c-41e0-8a24-985eb9f63d27","Type":"ContainerStarted","Data":"18953af6fb4712bb35afb139661e753977f2d27b9d448c3333c10463c47356d6"} Dec 02 10:17:57 crc kubenswrapper[4685]: I1202 10:17:57.563434 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-p62sl" Dec 02 10:17:57 crc kubenswrapper[4685]: I1202 10:17:57.686965 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" Dec 02 10:17:57 crc kubenswrapper[4685]: I1202 10:17:57.714680 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5pwml" podStartSLOduration=3.96539768 podStartE2EDuration="1m21.714657858s" podCreationTimestamp="2025-12-02 10:16:36 +0000 UTC" firstStartedPulling="2025-12-02 10:16:38.462359068 +0000 UTC m=+890.834133222" lastFinishedPulling="2025-12-02 10:17:56.211619236 +0000 UTC m=+968.583393400" observedRunningTime="2025-12-02 10:17:57.708035755 +0000 UTC m=+970.079809909" watchObservedRunningTime="2025-12-02 10:17:57.714657858 +0000 UTC m=+970.086432012" Dec 02 10:17:57 crc kubenswrapper[4685]: I1202 10:17:57.736487 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" podStartSLOduration=3.889434297 podStartE2EDuration="1m22.736465681s" podCreationTimestamp="2025-12-02 10:16:35 +0000 UTC" firstStartedPulling="2025-12-02 10:16:37.353887516 +0000 UTC m=+889.725661660" lastFinishedPulling="2025-12-02 10:17:56.20091889 +0000 UTC m=+968.572693044" observedRunningTime="2025-12-02 10:17:57.733325614 +0000 UTC m=+970.105099768" watchObservedRunningTime="2025-12-02 10:17:57.736465681 +0000 UTC m=+970.108239835" Dec 02 10:17:58 crc kubenswrapper[4685]: I1202 10:17:58.324639 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd49v57k" Dec 02 10:18:05 crc kubenswrapper[4685]: I1202 10:18:05.564054 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-vn545" Dec 02 10:18:05 crc kubenswrapper[4685]: I1202 10:18:05.630202 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xqspp" Dec 02 10:18:05 crc kubenswrapper[4685]: I1202 10:18:05.665678 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-xnk7k" Dec 02 10:18:05 crc kubenswrapper[4685]: I1202 10:18:05.830529 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-7qgfl" Dec 02 10:18:05 crc kubenswrapper[4685]: I1202 10:18:05.966469 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-5sn8t" Dec 02 10:18:06 crc kubenswrapper[4685]: I1202 10:18:06.446496 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-vx7jq" Dec 02 10:18:06 crc kubenswrapper[4685]: I1202 10:18:06.633218 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-695b4bc5dc-jxvqs" Dec 02 10:18:06 crc kubenswrapper[4685]: I1202 10:18:06.715164 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-zsxv4" Dec 02 10:18:06 crc kubenswrapper[4685]: I1202 10:18:06.753236 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2dpvv" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.734209 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6vrsl"] Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 
10:18:21.737083 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.741073 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-89jh2" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.741385 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.743902 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.744248 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.763764 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6vrsl"] Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.829404 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-config\") pod \"dnsmasq-dns-675f4bcbfc-6vrsl\" (UID: \"0d8a32af-d222-442e-b1ae-6a6ced2a5a32\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.829457 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chf54\" (UniqueName: \"kubernetes.io/projected/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-kube-api-access-chf54\") pod \"dnsmasq-dns-675f4bcbfc-6vrsl\" (UID: \"0d8a32af-d222-442e-b1ae-6a6ced2a5a32\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.835105 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5gdfb"] Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.836700 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.845112 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.862620 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5gdfb"] Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.930692 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-config\") pod \"dnsmasq-dns-675f4bcbfc-6vrsl\" (UID: \"0d8a32af-d222-442e-b1ae-6a6ced2a5a32\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.930762 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chf54\" (UniqueName: \"kubernetes.io/projected/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-kube-api-access-chf54\") pod \"dnsmasq-dns-675f4bcbfc-6vrsl\" (UID: \"0d8a32af-d222-442e-b1ae-6a6ced2a5a32\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.931864 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-config\") pod \"dnsmasq-dns-675f4bcbfc-6vrsl\" (UID: \"0d8a32af-d222-442e-b1ae-6a6ced2a5a32\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" Dec 02 10:18:21 crc kubenswrapper[4685]: I1202 10:18:21.958633 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chf54\" (UniqueName: \"kubernetes.io/projected/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-kube-api-access-chf54\") pod \"dnsmasq-dns-675f4bcbfc-6vrsl\" (UID: \"0d8a32af-d222-442e-b1ae-6a6ced2a5a32\") " pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.032921 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdkvv\" (UniqueName: \"kubernetes.io/projected/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-kube-api-access-cdkvv\") pod \"dnsmasq-dns-78dd6ddcc-5gdfb\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.032984 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-5gdfb\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.033060 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-config\") pod \"dnsmasq-dns-78dd6ddcc-5gdfb\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.054536 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.134483 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-config\") pod \"dnsmasq-dns-78dd6ddcc-5gdfb\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.134577 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdkvv\" (UniqueName: \"kubernetes.io/projected/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-kube-api-access-cdkvv\") pod \"dnsmasq-dns-78dd6ddcc-5gdfb\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.134600 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-5gdfb\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.135496 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-5gdfb\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.135723 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-config\") pod \"dnsmasq-dns-78dd6ddcc-5gdfb\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.152011 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdkvv\" (UniqueName: \"kubernetes.io/projected/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-kube-api-access-cdkvv\") pod \"dnsmasq-dns-78dd6ddcc-5gdfb\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.304335 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6vrsl"] Dec 02 10:18:22 crc kubenswrapper[4685]: W1202 10:18:22.309826 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d8a32af_d222_442e_b1ae_6a6ced2a5a32.slice/crio-106370ea8436fffc2beedd620a9db6d7374fdb0132ae91140adc1382649e0daf WatchSource:0}: Error finding container 106370ea8436fffc2beedd620a9db6d7374fdb0132ae91140adc1382649e0daf: Status 404 returned error can't find the container with id 106370ea8436fffc2beedd620a9db6d7374fdb0132ae91140adc1382649e0daf Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.450151 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.876537 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" event={"ID":"0d8a32af-d222-442e-b1ae-6a6ced2a5a32","Type":"ContainerStarted","Data":"106370ea8436fffc2beedd620a9db6d7374fdb0132ae91140adc1382649e0daf"} Dec 02 10:18:22 crc kubenswrapper[4685]: I1202 10:18:22.915600 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5gdfb"] Dec 02 10:18:22 crc kubenswrapper[4685]: W1202 10:18:22.924463 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod52bcc9b0_072b_4ecb_93da_8bbe69eb0bf2.slice/crio-5a20165b19fd0e0d9f897c7843a86525ac262b0c7ca2752f3293787779a11a6d WatchSource:0}: Error finding container 5a20165b19fd0e0d9f897c7843a86525ac262b0c7ca2752f3293787779a11a6d: Status 404 returned error can't find the container with id 5a20165b19fd0e0d9f897c7843a86525ac262b0c7ca2752f3293787779a11a6d Dec 02 10:18:23 crc kubenswrapper[4685]: I1202 10:18:23.886661 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" event={"ID":"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2","Type":"ContainerStarted","Data":"5a20165b19fd0e0d9f897c7843a86525ac262b0c7ca2752f3293787779a11a6d"} Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.001851 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6vrsl"] Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.037269 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qddsz"] Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.038543 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.058660 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qddsz"] Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.178666 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-config\") pod \"dnsmasq-dns-666b6646f7-qddsz\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.178731 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-dns-svc\") pod \"dnsmasq-dns-666b6646f7-qddsz\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.178766 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dnkw\" (UniqueName: \"kubernetes.io/projected/d4eb4e8f-b419-490b-a0e5-026b87d06730-kube-api-access-6dnkw\") pod \"dnsmasq-dns-666b6646f7-qddsz\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.280251 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-dns-svc\") pod \"dnsmasq-dns-666b6646f7-qddsz\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.280306 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dnkw\" (UniqueName: \"kubernetes.io/projected/d4eb4e8f-b419-490b-a0e5-026b87d06730-kube-api-access-6dnkw\") pod \"dnsmasq-dns-666b6646f7-qddsz\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.280403 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-config\") pod \"dnsmasq-dns-666b6646f7-qddsz\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.281302 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-config\") pod \"dnsmasq-dns-666b6646f7-qddsz\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.281316 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-dns-svc\") pod \"dnsmasq-dns-666b6646f7-qddsz\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.339036 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dnkw\" (UniqueName: 
\"kubernetes.io/projected/d4eb4e8f-b419-490b-a0e5-026b87d06730-kube-api-access-6dnkw\") pod \"dnsmasq-dns-666b6646f7-qddsz\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.384082 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.385767 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5gdfb"] Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.476979 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-t2vpz"] Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.482029 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.523629 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-t2vpz"] Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.589441 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-config\") pod \"dnsmasq-dns-57d769cc4f-t2vpz\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.589576 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7znv\" (UniqueName: \"kubernetes.io/projected/c6865118-2f52-4465-b1f7-173ac10698c7-kube-api-access-x7znv\") pod \"dnsmasq-dns-57d769cc4f-t2vpz\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.589605 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-t2vpz\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.691508 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-config\") pod \"dnsmasq-dns-57d769cc4f-t2vpz\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.691697 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7znv\" (UniqueName: \"kubernetes.io/projected/c6865118-2f52-4465-b1f7-173ac10698c7-kube-api-access-x7znv\") pod \"dnsmasq-dns-57d769cc4f-t2vpz\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.692188 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-t2vpz\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.692726 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-config\") pod \"dnsmasq-dns-57d769cc4f-t2vpz\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.693180 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-t2vpz\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.726394 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7znv\" (UniqueName: \"kubernetes.io/projected/c6865118-2f52-4465-b1f7-173ac10698c7-kube-api-access-x7znv\") pod \"dnsmasq-dns-57d769cc4f-t2vpz\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:25 crc kubenswrapper[4685]: I1202 10:18:25.843394 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.135450 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qddsz"] Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.198911 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.200183 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.203295 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-nkmv4" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.203457 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.207225 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.207624 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.207812 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.208268 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.209145 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.229410 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305022 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305098 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305177 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc2sl\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-kube-api-access-fc2sl\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305230 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305256 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305270 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305292 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305308 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305357 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305376 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-config-data\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.305402 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.406874 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.406915 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.406943 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc2sl\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-kube-api-access-fc2sl\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.406966 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.406992 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.407007 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.407031 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.407044 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.407074 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 
10:18:26.407092 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-config-data\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.407119 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.409851 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.411533 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-pod-info\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.411785 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.413094 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-server-conf\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.413241 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.413678 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-config-data\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.419751 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.420789 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " 
pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.437188 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.441487 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-t2vpz"] Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.446944 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc2sl\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-kube-api-access-fc2sl\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.448666 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.456389 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.581935 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.623659 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.625526 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.628480 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.629752 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.643243 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.643472 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.643761 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.644044 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-96wnm" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.644246 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.653819 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.720843 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.720899 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.720928 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.720959 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.722505 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.722552 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.722602 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.722625 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.722676 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqlst\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-kube-api-access-fqlst\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.722739 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.722795 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824362 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824413 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824458 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824478 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824492 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824508 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824536 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824553 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824572 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824600 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.824632 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqlst\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-kube-api-access-fqlst\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.826121 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.827712 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" 
(UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.832091 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.833120 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.834297 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.836043 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.836956 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.837256 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.843282 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.845815 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.853153 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqlst\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-kube-api-access-fqlst\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.869613 4685 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.930175 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qddsz" event={"ID":"d4eb4e8f-b419-490b-a0e5-026b87d06730","Type":"ContainerStarted","Data":"b4f1e6e10339c49cb8f5e6509becaeec392e2c792d868dc28c2c14dbbe0f659c"} Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.931769 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" event={"ID":"c6865118-2f52-4465-b1f7-173ac10698c7","Type":"ContainerStarted","Data":"ba97c4d1a84c08669d429381c9639b8a92362c63d2d8e490488bb8a46b068925"} Dec 02 10:18:26 crc kubenswrapper[4685]: I1202 10:18:26.990629 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.300028 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.677125 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 10:18:27 crc kubenswrapper[4685]: W1202 10:18:27.710890 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0366597_4ac4_482d_ba5f_dfa2956d1fb3.slice/crio-f7a749d9e3b8907be762a78cfff654b9fbd673229705469e448ef1126e2d98a1 WatchSource:0}: Error finding container f7a749d9e3b8907be762a78cfff654b9fbd673229705469e448ef1126e2d98a1: Status 404 returned error can't find the container with id f7a749d9e3b8907be762a78cfff654b9fbd673229705469e448ef1126e2d98a1 Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.935244 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.937235 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.937315 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.940914 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.941168 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-8jq6z" Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.945265 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.945510 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.952018 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.963110 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"72a08bf5-82d6-48e0-a188-c7ac46ba22b4","Type":"ContainerStarted","Data":"cdbdb1a50e3641169eab39aaa66f8dd8d62adc9f36631638934552e7a65f81ef"} Dec 02 10:18:27 crc kubenswrapper[4685]: I1202 10:18:27.982928 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e0366597-4ac4-482d-ba5f-dfa2956d1fb3","Type":"ContainerStarted","Data":"f7a749d9e3b8907be762a78cfff654b9fbd673229705469e448ef1126e2d98a1"} Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.064078 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/13cc5021-b162-434b-8ae9-d3781b6f421e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.064147 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13cc5021-b162-434b-8ae9-d3781b6f421e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.064180 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rht5j\" (UniqueName: \"kubernetes.io/projected/13cc5021-b162-434b-8ae9-d3781b6f421e-kube-api-access-rht5j\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.064229 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.064272 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/13cc5021-b162-434b-8ae9-d3781b6f421e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.064303 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13cc5021-b162-434b-8ae9-d3781b6f421e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.064327 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/13cc5021-b162-434b-8ae9-d3781b6f421e-config-data-default\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.064360 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/13cc5021-b162-434b-8ae9-d3781b6f421e-kolla-config\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.167937 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/13cc5021-b162-434b-8ae9-d3781b6f421e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.168577 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13cc5021-b162-434b-8ae9-d3781b6f421e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.168663 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rht5j\" (UniqueName: \"kubernetes.io/projected/13cc5021-b162-434b-8ae9-d3781b6f421e-kube-api-access-rht5j\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.168790 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.169524 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.169763 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/13cc5021-b162-434b-8ae9-d3781b6f421e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.169854 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/13cc5021-b162-434b-8ae9-d3781b6f421e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.169891 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/13cc5021-b162-434b-8ae9-d3781b6f421e-config-data-default\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.169964 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/13cc5021-b162-434b-8ae9-d3781b6f421e-kolla-config\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.170353 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13cc5021-b162-434b-8ae9-d3781b6f421e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.170901 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/13cc5021-b162-434b-8ae9-d3781b6f421e-kolla-config\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.171330 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/13cc5021-b162-434b-8ae9-d3781b6f421e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.171948 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/13cc5021-b162-434b-8ae9-d3781b6f421e-config-data-default\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.191976 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13cc5021-b162-434b-8ae9-d3781b6f421e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.195352 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rht5j\" (UniqueName: \"kubernetes.io/projected/13cc5021-b162-434b-8ae9-d3781b6f421e-kube-api-access-rht5j\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.213252 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/13cc5021-b162-434b-8ae9-d3781b6f421e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: 
I1202 10:18:28.258662 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"13cc5021-b162-434b-8ae9-d3781b6f421e\") " pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.278247 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 02 10:18:28 crc kubenswrapper[4685]: I1202 10:18:28.914533 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.003700 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"13cc5021-b162-434b-8ae9-d3781b6f421e","Type":"ContainerStarted","Data":"c80b74ca192a5edf1dd4aa2b577b60b52524658d679db1eb11898770ab537cf2"} Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.337176 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.338830 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.349880 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.350698 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.354125 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.354355 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-pv4qm" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.368002 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.401241 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.401288 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/67ed831e-7122-4e6f-a320-51df1849c1d7-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.401305 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67ed831e-7122-4e6f-a320-51df1849c1d7-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.401335 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/67ed831e-7122-4e6f-a320-51df1849c1d7-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.401358 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4m8f\" (UniqueName: \"kubernetes.io/projected/67ed831e-7122-4e6f-a320-51df1849c1d7-kube-api-access-c4m8f\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.401374 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/67ed831e-7122-4e6f-a320-51df1849c1d7-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.401407 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67ed831e-7122-4e6f-a320-51df1849c1d7-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.401442 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/67ed831e-7122-4e6f-a320-51df1849c1d7-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.502871 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.502941 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/67ed831e-7122-4e6f-a320-51df1849c1d7-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.502969 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67ed831e-7122-4e6f-a320-51df1849c1d7-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.503007 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/67ed831e-7122-4e6f-a320-51df1849c1d7-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.503037 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4m8f\" (UniqueName: 
\"kubernetes.io/projected/67ed831e-7122-4e6f-a320-51df1849c1d7-kube-api-access-c4m8f\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.503059 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/67ed831e-7122-4e6f-a320-51df1849c1d7-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.503108 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67ed831e-7122-4e6f-a320-51df1849c1d7-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.503161 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/67ed831e-7122-4e6f-a320-51df1849c1d7-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.504303 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/67ed831e-7122-4e6f-a320-51df1849c1d7-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.504451 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/67ed831e-7122-4e6f-a320-51df1849c1d7-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.504716 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.505336 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.506286 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.511215 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-vrjt4" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.511435 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.512086 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.522624 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/67ed831e-7122-4e6f-a320-51df1849c1d7-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.524766 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67ed831e-7122-4e6f-a320-51df1849c1d7-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.549864 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/67ed831e-7122-4e6f-a320-51df1849c1d7-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.555268 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/67ed831e-7122-4e6f-a320-51df1849c1d7-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.555658 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4m8f\" (UniqueName: \"kubernetes.io/projected/67ed831e-7122-4e6f-a320-51df1849c1d7-kube-api-access-c4m8f\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.581676 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"67ed831e-7122-4e6f-a320-51df1849c1d7\") " pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.599877 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.612382 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/449de76a-491d-4874-8b46-df24eb5c628a-combined-ca-bundle\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.612444 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/449de76a-491d-4874-8b46-df24eb5c628a-config-data\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.612529 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/449de76a-491d-4874-8b46-df24eb5c628a-memcached-tls-certs\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.612545 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtm2f\" (UniqueName: \"kubernetes.io/projected/449de76a-491d-4874-8b46-df24eb5c628a-kube-api-access-xtm2f\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.612564 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/449de76a-491d-4874-8b46-df24eb5c628a-kolla-config\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.679717 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.714207 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/449de76a-491d-4874-8b46-df24eb5c628a-memcached-tls-certs\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.714272 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtm2f\" (UniqueName: \"kubernetes.io/projected/449de76a-491d-4874-8b46-df24eb5c628a-kube-api-access-xtm2f\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.714292 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/449de76a-491d-4874-8b46-df24eb5c628a-kolla-config\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.714374 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/449de76a-491d-4874-8b46-df24eb5c628a-combined-ca-bundle\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.714398 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/449de76a-491d-4874-8b46-df24eb5c628a-config-data\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.715382 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/449de76a-491d-4874-8b46-df24eb5c628a-kolla-config\") pod \"memcached-0\" 
(UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.715387 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/449de76a-491d-4874-8b46-df24eb5c628a-config-data\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.723220 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/449de76a-491d-4874-8b46-df24eb5c628a-combined-ca-bundle\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.753709 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/449de76a-491d-4874-8b46-df24eb5c628a-memcached-tls-certs\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.769988 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtm2f\" (UniqueName: \"kubernetes.io/projected/449de76a-491d-4874-8b46-df24eb5c628a-kube-api-access-xtm2f\") pod \"memcached-0\" (UID: \"449de76a-491d-4874-8b46-df24eb5c628a\") " pod="openstack/memcached-0" Dec 02 10:18:29 crc kubenswrapper[4685]: I1202 10:18:29.931746 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 02 10:18:31 crc kubenswrapper[4685]: I1202 10:18:31.934381 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 10:18:31 crc kubenswrapper[4685]: I1202 10:18:31.935365 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 10:18:31 crc kubenswrapper[4685]: I1202 10:18:31.935432 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 10:18:31 crc kubenswrapper[4685]: I1202 10:18:31.949781 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-wplk6" Dec 02 10:18:31 crc kubenswrapper[4685]: I1202 10:18:31.974524 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz5mm\" (UniqueName: \"kubernetes.io/projected/886b7e36-bc06-45fe-a10d-3840c1f68d24-kube-api-access-lz5mm\") pod \"kube-state-metrics-0\" (UID: \"886b7e36-bc06-45fe-a10d-3840c1f68d24\") " pod="openstack/kube-state-metrics-0" Dec 02 10:18:32 crc kubenswrapper[4685]: I1202 10:18:32.094250 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz5mm\" (UniqueName: \"kubernetes.io/projected/886b7e36-bc06-45fe-a10d-3840c1f68d24-kube-api-access-lz5mm\") pod \"kube-state-metrics-0\" (UID: \"886b7e36-bc06-45fe-a10d-3840c1f68d24\") " pod="openstack/kube-state-metrics-0" Dec 02 10:18:32 crc kubenswrapper[4685]: I1202 10:18:32.115009 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz5mm\" (UniqueName: \"kubernetes.io/projected/886b7e36-bc06-45fe-a10d-3840c1f68d24-kube-api-access-lz5mm\") pod \"kube-state-metrics-0\" (UID: \"886b7e36-bc06-45fe-a10d-3840c1f68d24\") " pod="openstack/kube-state-metrics-0" Dec 02 10:18:32 crc kubenswrapper[4685]: I1202 10:18:32.296021 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 10:18:33 crc kubenswrapper[4685]: I1202 10:18:33.968272 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-2sntp"] Dec 02 10:18:33 crc kubenswrapper[4685]: I1202 10:18:33.970316 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2sntp" Dec 02 10:18:33 crc kubenswrapper[4685]: I1202 10:18:33.973213 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-tsbj6" Dec 02 10:18:33 crc kubenswrapper[4685]: I1202 10:18:33.973407 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 02 10:18:33 crc kubenswrapper[4685]: I1202 10:18:33.973504 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.005592 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-c8mrc"] Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.010115 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.027637 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2sntp"] Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.040546 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-c8mrc"] Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.049428 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-etc-ovs\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.050084 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-var-lib\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.050425 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee7d3162-98e7-4af9-aad0-2098e23d1743-combined-ca-bundle\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.051079 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee7d3162-98e7-4af9-aad0-2098e23d1743-var-run-ovn\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.051402 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-var-run\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.051658 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee7d3162-98e7-4af9-aad0-2098e23d1743-scripts\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.052168 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee7d3162-98e7-4af9-aad0-2098e23d1743-var-run\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.052368 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxmb4\" (UniqueName: \"kubernetes.io/projected/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-kube-api-access-bxmb4\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 
10:18:34.052603 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq4fj\" (UniqueName: \"kubernetes.io/projected/ee7d3162-98e7-4af9-aad0-2098e23d1743-kube-api-access-cq4fj\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.052800 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-scripts\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.052943 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee7d3162-98e7-4af9-aad0-2098e23d1743-var-log-ovn\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.053126 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee7d3162-98e7-4af9-aad0-2098e23d1743-ovn-controller-tls-certs\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.053281 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-var-log\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.154744 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-var-lib\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.154814 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee7d3162-98e7-4af9-aad0-2098e23d1743-combined-ca-bundle\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.154839 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee7d3162-98e7-4af9-aad0-2098e23d1743-var-run-ovn\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.154876 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-var-run\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.154891 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/ee7d3162-98e7-4af9-aad0-2098e23d1743-scripts\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.154910 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee7d3162-98e7-4af9-aad0-2098e23d1743-var-run\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.154933 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxmb4\" (UniqueName: \"kubernetes.io/projected/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-kube-api-access-bxmb4\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.154952 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq4fj\" (UniqueName: \"kubernetes.io/projected/ee7d3162-98e7-4af9-aad0-2098e23d1743-kube-api-access-cq4fj\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.154973 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-scripts\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.154997 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee7d3162-98e7-4af9-aad0-2098e23d1743-var-log-ovn\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.155017 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee7d3162-98e7-4af9-aad0-2098e23d1743-ovn-controller-tls-certs\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.155037 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-var-log\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.155070 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-etc-ovs\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.155519 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-etc-ovs\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " 
pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.156659 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-var-lib\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.160244 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-scripts\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.160474 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/ee7d3162-98e7-4af9-aad0-2098e23d1743-var-log-ovn\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.162545 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-var-log\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.162720 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ee7d3162-98e7-4af9-aad0-2098e23d1743-var-run\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.162777 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/ee7d3162-98e7-4af9-aad0-2098e23d1743-var-run-ovn\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.163974 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee7d3162-98e7-4af9-aad0-2098e23d1743-combined-ca-bundle\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.169793 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee7d3162-98e7-4af9-aad0-2098e23d1743-ovn-controller-tls-certs\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.172117 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ee7d3162-98e7-4af9-aad0-2098e23d1743-scripts\") pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.173889 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq4fj\" (UniqueName: \"kubernetes.io/projected/ee7d3162-98e7-4af9-aad0-2098e23d1743-kube-api-access-cq4fj\") 
pod \"ovn-controller-2sntp\" (UID: \"ee7d3162-98e7-4af9-aad0-2098e23d1743\") " pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.173958 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxmb4\" (UniqueName: \"kubernetes.io/projected/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-kube-api-access-bxmb4\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.176528 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/eac3c6e2-9ac7-40a0-91e5-a54010d07a99-var-run\") pod \"ovn-controller-ovs-c8mrc\" (UID: \"eac3c6e2-9ac7-40a0-91e5-a54010d07a99\") " pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.304904 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2sntp" Dec 02 10:18:34 crc kubenswrapper[4685]: I1202 10:18:34.329883 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:18:36 crc kubenswrapper[4685]: I1202 10:18:36.967327 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 10:18:36 crc kubenswrapper[4685]: I1202 10:18:36.969766 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:36 crc kubenswrapper[4685]: I1202 10:18:36.974441 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-rw9s6" Dec 02 10:18:36 crc kubenswrapper[4685]: I1202 10:18:36.974721 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 02 10:18:36 crc kubenswrapper[4685]: I1202 10:18:36.974987 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 02 10:18:36 crc kubenswrapper[4685]: I1202 10:18:36.975088 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 02 10:18:36 crc kubenswrapper[4685]: I1202 10:18:36.975121 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 02 10:18:36 crc kubenswrapper[4685]: I1202 10:18:36.982824 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.005605 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0875b84e-91b6-4092-b8b3-a75abd86728d-config\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.005649 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0875b84e-91b6-4092-b8b3-a75abd86728d-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.005727 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjbl5\" (UniqueName: 
\"kubernetes.io/projected/0875b84e-91b6-4092-b8b3-a75abd86728d-kube-api-access-sjbl5\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.005756 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0875b84e-91b6-4092-b8b3-a75abd86728d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.005779 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0875b84e-91b6-4092-b8b3-a75abd86728d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.005795 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.005866 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0875b84e-91b6-4092-b8b3-a75abd86728d-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.005881 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0875b84e-91b6-4092-b8b3-a75abd86728d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.106965 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0875b84e-91b6-4092-b8b3-a75abd86728d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.107021 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0875b84e-91b6-4092-b8b3-a75abd86728d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.107043 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.107132 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0875b84e-91b6-4092-b8b3-a75abd86728d-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 
02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.107157 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0875b84e-91b6-4092-b8b3-a75abd86728d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.107222 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0875b84e-91b6-4092-b8b3-a75abd86728d-config\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.107244 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0875b84e-91b6-4092-b8b3-a75abd86728d-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.107290 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjbl5\" (UniqueName: \"kubernetes.io/projected/0875b84e-91b6-4092-b8b3-a75abd86728d-kube-api-access-sjbl5\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.107720 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0875b84e-91b6-4092-b8b3-a75abd86728d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.108317 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.108781 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0875b84e-91b6-4092-b8b3-a75abd86728d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.109175 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0875b84e-91b6-4092-b8b3-a75abd86728d-config\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.116125 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0875b84e-91b6-4092-b8b3-a75abd86728d-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.116746 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0875b84e-91b6-4092-b8b3-a75abd86728d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: 
\"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.117427 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0875b84e-91b6-4092-b8b3-a75abd86728d-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.136518 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjbl5\" (UniqueName: \"kubernetes.io/projected/0875b84e-91b6-4092-b8b3-a75abd86728d-kube-api-access-sjbl5\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.170756 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0875b84e-91b6-4092-b8b3-a75abd86728d\") " pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:37 crc kubenswrapper[4685]: I1202 10:18:37.300972 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.542458 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.543989 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.546517 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.546640 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.546538 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-ztfvv" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.546807 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.563778 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.730312 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03278bdb-7697-4dd0-b482-97b93aa055ba-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.730406 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptsl8\" (UniqueName: \"kubernetes.io/projected/03278bdb-7697-4dd0-b482-97b93aa055ba-kube-api-access-ptsl8\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.730446 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/03278bdb-7697-4dd0-b482-97b93aa055ba-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.730489 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03278bdb-7697-4dd0-b482-97b93aa055ba-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.730645 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03278bdb-7697-4dd0-b482-97b93aa055ba-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.730713 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03278bdb-7697-4dd0-b482-97b93aa055ba-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.731090 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.731185 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03278bdb-7697-4dd0-b482-97b93aa055ba-config\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.833192 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.833257 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03278bdb-7697-4dd0-b482-97b93aa055ba-config\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.833326 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03278bdb-7697-4dd0-b482-97b93aa055ba-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.833356 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptsl8\" (UniqueName: \"kubernetes.io/projected/03278bdb-7697-4dd0-b482-97b93aa055ba-kube-api-access-ptsl8\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: 
I1202 10:18:38.833380 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/03278bdb-7697-4dd0-b482-97b93aa055ba-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.833415 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03278bdb-7697-4dd0-b482-97b93aa055ba-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.833442 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03278bdb-7697-4dd0-b482-97b93aa055ba-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.833467 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03278bdb-7697-4dd0-b482-97b93aa055ba-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.833573 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.833854 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/03278bdb-7697-4dd0-b482-97b93aa055ba-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.834684 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03278bdb-7697-4dd0-b482-97b93aa055ba-config\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.834721 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03278bdb-7697-4dd0-b482-97b93aa055ba-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.838657 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/03278bdb-7697-4dd0-b482-97b93aa055ba-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.839390 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03278bdb-7697-4dd0-b482-97b93aa055ba-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: 
\"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.845457 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03278bdb-7697-4dd0-b482-97b93aa055ba-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.853547 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptsl8\" (UniqueName: \"kubernetes.io/projected/03278bdb-7697-4dd0-b482-97b93aa055ba-kube-api-access-ptsl8\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.857229 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"03278bdb-7697-4dd0-b482-97b93aa055ba\") " pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:38 crc kubenswrapper[4685]: I1202 10:18:38.864512 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 02 10:18:50 crc kubenswrapper[4685]: E1202 10:18:50.501189 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 02 10:18:50 crc kubenswrapper[4685]: E1202 10:18:50.502290 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fc2sl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(72a08bf5-82d6-48e0-a188-c7ac46ba22b4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:18:50 crc kubenswrapper[4685]: E1202 10:18:50.503448 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" Dec 02 10:18:50 crc kubenswrapper[4685]: E1202 10:18:50.574299 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 02 10:18:50 crc kubenswrapper[4685]: E1202 10:18:50.574517 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fqlst,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(e0366597-4ac4-482d-ba5f-dfa2956d1fb3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:18:50 crc kubenswrapper[4685]: E1202 10:18:50.575731 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" Dec 02 10:18:51 crc kubenswrapper[4685]: E1202 10:18:51.223435 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" Dec 02 10:18:51 crc kubenswrapper[4685]: E1202 10:18:51.222813 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.157523 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.158048 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x7znv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-t2vpz_openstack(c6865118-2f52-4465-b1f7-173ac10698c7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.159478 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" podUID="c6865118-2f52-4465-b1f7-173ac10698c7" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.179320 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.179493 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-chf54,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-6vrsl_openstack(0d8a32af-d222-442e-b1ae-6a6ced2a5a32): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.187319 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" podUID="0d8a32af-d222-442e-b1ae-6a6ced2a5a32" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.189438 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.189605 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cdkvv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-5gdfb_openstack(52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.190726 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" podUID="52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.258643 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.259486 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6dnkw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-qddsz_openstack(d4eb4e8f-b419-490b-a0e5-026b87d06730): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.261082 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-qddsz" podUID="d4eb4e8f-b419-490b-a0e5-026b87d06730" Dec 02 10:18:54 crc kubenswrapper[4685]: E1202 10:18:54.271933 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" podUID="c6865118-2f52-4465-b1f7-173ac10698c7" Dec 02 10:18:54 crc kubenswrapper[4685]: I1202 10:18:54.809331 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 10:18:54 crc kubenswrapper[4685]: W1202 10:18:54.841855 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod886b7e36_bc06_45fe_a10d_3840c1f68d24.slice/crio-bea7a533f33baee701fe00774e682fc8942cfd688dfd8c28e3c19a137e2188f8 WatchSource:0}: Error finding container bea7a533f33baee701fe00774e682fc8942cfd688dfd8c28e3c19a137e2188f8: Status 404 returned error can't find the container with id bea7a533f33baee701fe00774e682fc8942cfd688dfd8c28e3c19a137e2188f8 Dec 02 10:18:54 crc kubenswrapper[4685]: I1202 10:18:54.855205 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2sntp"] Dec 02 10:18:54 crc kubenswrapper[4685]: I1202 10:18:54.868870 
4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 02 10:18:54 crc kubenswrapper[4685]: I1202 10:18:54.904687 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" Dec 02 10:18:54 crc kubenswrapper[4685]: I1202 10:18:54.913894 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.023020 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chf54\" (UniqueName: \"kubernetes.io/projected/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-kube-api-access-chf54\") pod \"0d8a32af-d222-442e-b1ae-6a6ced2a5a32\" (UID: \"0d8a32af-d222-442e-b1ae-6a6ced2a5a32\") " Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.024022 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-dns-svc\") pod \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.024211 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-config\") pod \"0d8a32af-d222-442e-b1ae-6a6ced2a5a32\" (UID: \"0d8a32af-d222-442e-b1ae-6a6ced2a5a32\") " Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.024263 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-config\") pod \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.024291 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdkvv\" (UniqueName: \"kubernetes.io/projected/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-kube-api-access-cdkvv\") pod \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\" (UID: \"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2\") " Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.025318 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2" (UID: "52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.025089 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-config" (OuterVolumeSpecName: "config") pod "0d8a32af-d222-442e-b1ae-6a6ced2a5a32" (UID: "0d8a32af-d222-442e-b1ae-6a6ced2a5a32"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.025426 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-config" (OuterVolumeSpecName: "config") pod "52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2" (UID: "52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.026455 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.026482 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.026496 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.026546 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.027641 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-kube-api-access-cdkvv" (OuterVolumeSpecName: "kube-api-access-cdkvv") pod "52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2" (UID: "52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2"). InnerVolumeSpecName "kube-api-access-cdkvv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.027705 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-kube-api-access-chf54" (OuterVolumeSpecName: "kube-api-access-chf54") pod "0d8a32af-d222-442e-b1ae-6a6ced2a5a32" (UID: "0d8a32af-d222-442e-b1ae-6a6ced2a5a32"). InnerVolumeSpecName "kube-api-access-chf54". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.127630 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chf54\" (UniqueName: \"kubernetes.io/projected/0d8a32af-d222-442e-b1ae-6a6ced2a5a32-kube-api-access-chf54\") on node \"crc\" DevicePath \"\"" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.127663 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdkvv\" (UniqueName: \"kubernetes.io/projected/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2-kube-api-access-cdkvv\") on node \"crc\" DevicePath \"\"" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.262458 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"13cc5021-b162-434b-8ae9-d3781b6f421e","Type":"ContainerStarted","Data":"f76543b45bad85b28e498697c9728bdc8ca3cd6984c4adb6ac7dcbcf6d59bafc"} Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.272272 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" event={"ID":"52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2","Type":"ContainerDied","Data":"5a20165b19fd0e0d9f897c7843a86525ac262b0c7ca2752f3293787779a11a6d"} Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.272392 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-5gdfb" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.282608 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"886b7e36-bc06-45fe-a10d-3840c1f68d24","Type":"ContainerStarted","Data":"bea7a533f33baee701fe00774e682fc8942cfd688dfd8c28e3c19a137e2188f8"} Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.295275 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sntp" event={"ID":"ee7d3162-98e7-4af9-aad0-2098e23d1743","Type":"ContainerStarted","Data":"18171a118fce29bc3bc695adf6925c7d91beb52e2f715cc2647462026deb40bb"} Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.297070 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"67ed831e-7122-4e6f-a320-51df1849c1d7","Type":"ContainerStarted","Data":"290a211cab0b1089c3f7da3ca4ba44f7b0de51cacd4897e4b272ce89707143d1"} Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.297099 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"67ed831e-7122-4e6f-a320-51df1849c1d7","Type":"ContainerStarted","Data":"331d30a1610bca753e314cd757357c772b44ca4f1eefe49edecb1609d1482fe2"} Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.300981 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"449de76a-491d-4874-8b46-df24eb5c628a","Type":"ContainerStarted","Data":"fbf17f0286c6703cfb1ba5d515d8371d9b348f8500b517ea1b7364bc5b83ce1f"} Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.309728 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.309799 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-6vrsl" event={"ID":"0d8a32af-d222-442e-b1ae-6a6ced2a5a32","Type":"ContainerDied","Data":"106370ea8436fffc2beedd620a9db6d7374fdb0132ae91140adc1382649e0daf"} Dec 02 10:18:55 crc kubenswrapper[4685]: E1202 10:18:55.364288 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-qddsz" podUID="d4eb4e8f-b419-490b-a0e5-026b87d06730" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.409412 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.452387 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6vrsl"] Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.461651 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-6vrsl"] Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.496996 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5gdfb"] Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.513402 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-5gdfb"] Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.918993 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d8a32af-d222-442e-b1ae-6a6ced2a5a32" path="/var/lib/kubelet/pods/0d8a32af-d222-442e-b1ae-6a6ced2a5a32/volumes" 
Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.919474 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2" path="/var/lib/kubelet/pods/52bcc9b0-072b-4ecb-93da-8bbe69eb0bf2/volumes" Dec 02 10:18:55 crc kubenswrapper[4685]: I1202 10:18:55.929636 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-c8mrc"] Dec 02 10:18:56 crc kubenswrapper[4685]: I1202 10:18:56.324544 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c8mrc" event={"ID":"eac3c6e2-9ac7-40a0-91e5-a54010d07a99","Type":"ContainerStarted","Data":"205f75ff0c4354abf80c0daaf9721ed00a058e703f99405c4c2b8c3283b303f6"} Dec 02 10:18:56 crc kubenswrapper[4685]: I1202 10:18:56.330108 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0875b84e-91b6-4092-b8b3-a75abd86728d","Type":"ContainerStarted","Data":"b86ec1190e634f7d394bf20e5f401dcf0bde4ff83753e92a958152cc6ca1336e"} Dec 02 10:18:56 crc kubenswrapper[4685]: I1202 10:18:56.356646 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 02 10:18:56 crc kubenswrapper[4685]: W1202 10:18:56.363974 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03278bdb_7697_4dd0_b482_97b93aa055ba.slice/crio-66ec2c3e7f39835dc91e24cd98e973ee53c012dcffa8c103a9efb246a8e813c1 WatchSource:0}: Error finding container 66ec2c3e7f39835dc91e24cd98e973ee53c012dcffa8c103a9efb246a8e813c1: Status 404 returned error can't find the container with id 66ec2c3e7f39835dc91e24cd98e973ee53c012dcffa8c103a9efb246a8e813c1 Dec 02 10:18:57 crc kubenswrapper[4685]: I1202 10:18:57.343734 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"03278bdb-7697-4dd0-b482-97b93aa055ba","Type":"ContainerStarted","Data":"66ec2c3e7f39835dc91e24cd98e973ee53c012dcffa8c103a9efb246a8e813c1"} Dec 02 10:18:58 crc kubenswrapper[4685]: I1202 10:18:58.355354 4685 generic.go:334] "Generic (PLEG): container finished" podID="13cc5021-b162-434b-8ae9-d3781b6f421e" containerID="f76543b45bad85b28e498697c9728bdc8ca3cd6984c4adb6ac7dcbcf6d59bafc" exitCode=0 Dec 02 10:18:58 crc kubenswrapper[4685]: I1202 10:18:58.355440 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"13cc5021-b162-434b-8ae9-d3781b6f421e","Type":"ContainerDied","Data":"f76543b45bad85b28e498697c9728bdc8ca3cd6984c4adb6ac7dcbcf6d59bafc"} Dec 02 10:18:59 crc kubenswrapper[4685]: I1202 10:18:59.366595 4685 generic.go:334] "Generic (PLEG): container finished" podID="67ed831e-7122-4e6f-a320-51df1849c1d7" containerID="290a211cab0b1089c3f7da3ca4ba44f7b0de51cacd4897e4b272ce89707143d1" exitCode=0 Dec 02 10:18:59 crc kubenswrapper[4685]: I1202 10:18:59.366649 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"67ed831e-7122-4e6f-a320-51df1849c1d7","Type":"ContainerDied","Data":"290a211cab0b1089c3f7da3ca4ba44f7b0de51cacd4897e4b272ce89707143d1"} Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.393129 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"886b7e36-bc06-45fe-a10d-3840c1f68d24","Type":"ContainerStarted","Data":"fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6"} Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.394501 4685 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.395631 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"03278bdb-7697-4dd0-b482-97b93aa055ba","Type":"ContainerStarted","Data":"1455072da17c2f097cd723d8737a24e9c7b900cbcc3335a8756c4ba6d3581d70"} Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.397528 4685 generic.go:334] "Generic (PLEG): container finished" podID="eac3c6e2-9ac7-40a0-91e5-a54010d07a99" containerID="d847aeb3e9d8624001740c601eca9754f38932e7236f8f3f76b413109cb6db8a" exitCode=0 Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.397634 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c8mrc" event={"ID":"eac3c6e2-9ac7-40a0-91e5-a54010d07a99","Type":"ContainerDied","Data":"d847aeb3e9d8624001740c601eca9754f38932e7236f8f3f76b413109cb6db8a"} Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.400119 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sntp" event={"ID":"ee7d3162-98e7-4af9-aad0-2098e23d1743","Type":"ContainerStarted","Data":"837a48e40d6c9a5dd33706f921666a7740e3a72b4cf4179534316435732f8076"} Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.400277 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-2sntp" Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.407220 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"67ed831e-7122-4e6f-a320-51df1849c1d7","Type":"ContainerStarted","Data":"2316b59b39764acd471053abf659d315e852d092c5bd14a2abbcee9683574a34"} Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.416768 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=24.835742484 podStartE2EDuration="31.416741459s" podCreationTimestamp="2025-12-02 10:18:31 +0000 UTC" firstStartedPulling="2025-12-02 10:18:54.848215779 +0000 UTC m=+1027.219989933" lastFinishedPulling="2025-12-02 10:19:01.429214754 +0000 UTC m=+1033.800988908" observedRunningTime="2025-12-02 10:19:02.410211779 +0000 UTC m=+1034.781985933" watchObservedRunningTime="2025-12-02 10:19:02.416741459 +0000 UTC m=+1034.788515613" Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.419033 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"449de76a-491d-4874-8b46-df24eb5c628a","Type":"ContainerStarted","Data":"ca0b23136f1c662b4f7a7da1a2d7e4cea4f4e2c240dc6f51bf44adc3dbb7fd8a"} Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.419504 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.425406 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0875b84e-91b6-4092-b8b3-a75abd86728d","Type":"ContainerStarted","Data":"7fbe155bdb95b954f5f9c1c59c3dcfbb18e00faf31841edbfa660ca950636af7"} Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.432490 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"13cc5021-b162-434b-8ae9-d3781b6f421e","Type":"ContainerStarted","Data":"44acb73c901f205ab21fa066552a5997437f89733053e1de4a799c0feb1b082d"} Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.435800 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/openstack-cell1-galera-0" podStartSLOduration=34.435777765 podStartE2EDuration="34.435777765s" podCreationTimestamp="2025-12-02 10:18:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:02.429385438 +0000 UTC m=+1034.801159592" watchObservedRunningTime="2025-12-02 10:19:02.435777765 +0000 UTC m=+1034.807551919" Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.472848 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-2sntp" podStartSLOduration=22.880292994 podStartE2EDuration="29.472823859s" podCreationTimestamp="2025-12-02 10:18:33 +0000 UTC" firstStartedPulling="2025-12-02 10:18:54.859646864 +0000 UTC m=+1027.231421018" lastFinishedPulling="2025-12-02 10:19:01.452177729 +0000 UTC m=+1033.823951883" observedRunningTime="2025-12-02 10:19:02.469335063 +0000 UTC m=+1034.841109217" watchObservedRunningTime="2025-12-02 10:19:02.472823859 +0000 UTC m=+1034.844598013" Dec 02 10:19:02 crc kubenswrapper[4685]: I1202 10:19:02.491371 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=27.716116937 podStartE2EDuration="33.491354532s" podCreationTimestamp="2025-12-02 10:18:29 +0000 UTC" firstStartedPulling="2025-12-02 10:18:55.032517153 +0000 UTC m=+1027.404291307" lastFinishedPulling="2025-12-02 10:19:00.807754748 +0000 UTC m=+1033.179528902" observedRunningTime="2025-12-02 10:19:02.485210941 +0000 UTC m=+1034.856985095" watchObservedRunningTime="2025-12-02 10:19:02.491354532 +0000 UTC m=+1034.863128686" Dec 02 10:19:03 crc kubenswrapper[4685]: I1202 10:19:03.443920 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e0366597-4ac4-482d-ba5f-dfa2956d1fb3","Type":"ContainerStarted","Data":"b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871"} Dec 02 10:19:03 crc kubenswrapper[4685]: I1202 10:19:03.448326 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c8mrc" event={"ID":"eac3c6e2-9ac7-40a0-91e5-a54010d07a99","Type":"ContainerStarted","Data":"d9a0c9da4a63674295ba5c702109f017a955d3658b37a2eb70179bb4d2a574ee"} Dec 02 10:19:03 crc kubenswrapper[4685]: I1202 10:19:03.448380 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c8mrc" event={"ID":"eac3c6e2-9ac7-40a0-91e5-a54010d07a99","Type":"ContainerStarted","Data":"3a765a8bb141f22bbc917a0ede73e9c769b47053a933e9859e815c0633cb494f"} Dec 02 10:19:03 crc kubenswrapper[4685]: I1202 10:19:03.480821 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=12.272386642 podStartE2EDuration="37.480797289s" podCreationTimestamp="2025-12-02 10:18:26 +0000 UTC" firstStartedPulling="2025-12-02 10:18:28.982900366 +0000 UTC m=+1001.354674520" lastFinishedPulling="2025-12-02 10:18:54.191311013 +0000 UTC m=+1026.563085167" observedRunningTime="2025-12-02 10:19:02.507792845 +0000 UTC m=+1034.879566999" watchObservedRunningTime="2025-12-02 10:19:03.480797289 +0000 UTC m=+1035.852571443" Dec 02 10:19:03 crc kubenswrapper[4685]: I1202 10:19:03.504026 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-c8mrc" podStartSLOduration=25.285132933 podStartE2EDuration="30.50400193s" podCreationTimestamp="2025-12-02 10:18:33 +0000 UTC" firstStartedPulling="2025-12-02 
10:18:56.232182341 +0000 UTC m=+1028.603956495" lastFinishedPulling="2025-12-02 10:19:01.451051338 +0000 UTC m=+1033.822825492" observedRunningTime="2025-12-02 10:19:03.497219193 +0000 UTC m=+1035.868993347" watchObservedRunningTime="2025-12-02 10:19:03.50400193 +0000 UTC m=+1035.875776084" Dec 02 10:19:04 crc kubenswrapper[4685]: I1202 10:19:04.330706 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:19:04 crc kubenswrapper[4685]: I1202 10:19:04.330782 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:19:08 crc kubenswrapper[4685]: I1202 10:19:08.279033 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 02 10:19:08 crc kubenswrapper[4685]: I1202 10:19:08.279609 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 02 10:19:09 crc kubenswrapper[4685]: I1202 10:19:09.680492 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 02 10:19:09 crc kubenswrapper[4685]: I1202 10:19:09.680799 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 02 10:19:09 crc kubenswrapper[4685]: I1202 10:19:09.933713 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 02 10:19:11 crc kubenswrapper[4685]: I1202 10:19:11.037586 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 02 10:19:11 crc kubenswrapper[4685]: I1202 10:19:11.150919 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.340774 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qddsz"] Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.365587 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.435084 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-fwh8c"] Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.436377 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.455319 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-fwh8c"] Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.523838 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"03278bdb-7697-4dd0-b482-97b93aa055ba","Type":"ContainerStarted","Data":"6713ad2d7cc04c7f6039468e3172ae5092597770914a6eb5262a6f892bd02cce"} Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.525786 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0875b84e-91b6-4092-b8b3-a75abd86728d","Type":"ContainerStarted","Data":"a628a736cb0a8996caf8228a589134fa26f87b77d159237b4384a94b2b6a54bf"} Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.527659 4685 generic.go:334] "Generic (PLEG): container finished" podID="d4eb4e8f-b419-490b-a0e5-026b87d06730" containerID="17420fa015893ab8f8ec57c75d7251084d49657897e33ddf55cc46ca7154b9b7" exitCode=0 Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.527708 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qddsz" event={"ID":"d4eb4e8f-b419-490b-a0e5-026b87d06730","Type":"ContainerDied","Data":"17420fa015893ab8f8ec57c75d7251084d49657897e33ddf55cc46ca7154b9b7"} Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.529687 4685 generic.go:334] "Generic (PLEG): container finished" podID="c6865118-2f52-4465-b1f7-173ac10698c7" containerID="2f9ac289008f0b229698686942c8200b77a2439e1bd17384918fcf477c23de96" exitCode=0 Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.529715 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" event={"ID":"c6865118-2f52-4465-b1f7-173ac10698c7","Type":"ContainerDied","Data":"2f9ac289008f0b229698686942c8200b77a2439e1bd17384918fcf477c23de96"} Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.546319 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jcgk\" (UniqueName: \"kubernetes.io/projected/157ac17b-8d0d-4474-82c1-01ee73434ad5-kube-api-access-5jcgk\") pod \"dnsmasq-dns-7cb5889db5-fwh8c\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.546575 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-fwh8c\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.546623 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-config\") pod \"dnsmasq-dns-7cb5889db5-fwh8c\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.575156 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=20.451510821 podStartE2EDuration="35.575139305s" podCreationTimestamp="2025-12-02 10:18:37 +0000 UTC" firstStartedPulling="2025-12-02 10:18:56.376390417 +0000 UTC m=+1028.748164571" 
lastFinishedPulling="2025-12-02 10:19:11.500018901 +0000 UTC m=+1043.871793055" observedRunningTime="2025-12-02 10:19:12.573050287 +0000 UTC m=+1044.944824451" watchObservedRunningTime="2025-12-02 10:19:12.575139305 +0000 UTC m=+1044.946913459" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.633874 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=21.562630038 podStartE2EDuration="37.633850917s" podCreationTimestamp="2025-12-02 10:18:35 +0000 UTC" firstStartedPulling="2025-12-02 10:18:55.412819574 +0000 UTC m=+1027.784593728" lastFinishedPulling="2025-12-02 10:19:11.484040463 +0000 UTC m=+1043.855814607" observedRunningTime="2025-12-02 10:19:12.63029028 +0000 UTC m=+1045.002064434" watchObservedRunningTime="2025-12-02 10:19:12.633850917 +0000 UTC m=+1045.005625071" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.650104 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-fwh8c\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.650700 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-config\") pod \"dnsmasq-dns-7cb5889db5-fwh8c\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.651021 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-fwh8c\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.651662 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jcgk\" (UniqueName: \"kubernetes.io/projected/157ac17b-8d0d-4474-82c1-01ee73434ad5-kube-api-access-5jcgk\") pod \"dnsmasq-dns-7cb5889db5-fwh8c\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.651792 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-config\") pod \"dnsmasq-dns-7cb5889db5-fwh8c\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.699147 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jcgk\" (UniqueName: \"kubernetes.io/projected/157ac17b-8d0d-4474-82c1-01ee73434ad5-kube-api-access-5jcgk\") pod \"dnsmasq-dns-7cb5889db5-fwh8c\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.824464 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:12 crc kubenswrapper[4685]: I1202 10:19:12.958240 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:19:12 crc kubenswrapper[4685]: E1202 10:19:12.978053 4685 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 02 10:19:12 crc kubenswrapper[4685]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/c6865118-2f52-4465-b1f7-173ac10698c7/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 02 10:19:12 crc kubenswrapper[4685]: > podSandboxID="ba97c4d1a84c08669d429381c9639b8a92362c63d2d8e490488bb8a46b068925" Dec 02 10:19:12 crc kubenswrapper[4685]: E1202 10:19:12.978203 4685 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 02 10:19:12 crc kubenswrapper[4685]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x7znv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-t2vpz_openstack(c6865118-2f52-4465-b1f7-173ac10698c7): CreateContainerError: container create failed: mount 
`/var/lib/kubelet/pods/c6865118-2f52-4465-b1f7-173ac10698c7/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 02 10:19:12 crc kubenswrapper[4685]: > logger="UnhandledError" Dec 02 10:19:12 crc kubenswrapper[4685]: E1202 10:19:12.981642 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/c6865118-2f52-4465-b1f7-173ac10698c7/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" podUID="c6865118-2f52-4465-b1f7-173ac10698c7" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.060437 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-dns-svc\") pod \"d4eb4e8f-b419-490b-a0e5-026b87d06730\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.061203 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-config\") pod \"d4eb4e8f-b419-490b-a0e5-026b87d06730\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.061245 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dnkw\" (UniqueName: \"kubernetes.io/projected/d4eb4e8f-b419-490b-a0e5-026b87d06730-kube-api-access-6dnkw\") pod \"d4eb4e8f-b419-490b-a0e5-026b87d06730\" (UID: \"d4eb4e8f-b419-490b-a0e5-026b87d06730\") " Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.070789 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4eb4e8f-b419-490b-a0e5-026b87d06730-kube-api-access-6dnkw" (OuterVolumeSpecName: "kube-api-access-6dnkw") pod "d4eb4e8f-b419-490b-a0e5-026b87d06730" (UID: "d4eb4e8f-b419-490b-a0e5-026b87d06730"). InnerVolumeSpecName "kube-api-access-6dnkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.081789 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d4eb4e8f-b419-490b-a0e5-026b87d06730" (UID: "d4eb4e8f-b419-490b-a0e5-026b87d06730"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.086098 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-config" (OuterVolumeSpecName: "config") pod "d4eb4e8f-b419-490b-a0e5-026b87d06730" (UID: "d4eb4e8f-b419-490b-a0e5-026b87d06730"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.163664 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.163997 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4eb4e8f-b419-490b-a0e5-026b87d06730-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.164023 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dnkw\" (UniqueName: \"kubernetes.io/projected/d4eb4e8f-b419-490b-a0e5-026b87d06730-kube-api-access-6dnkw\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.302551 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.342685 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.352078 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-fwh8c"] Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.540002 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"72a08bf5-82d6-48e0-a188-c7ac46ba22b4","Type":"ContainerStarted","Data":"ab362a7a28d7c14f4bc121a5fe40bd1d254d7dfc824d5973af3c51bd6cc6e44b"} Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.542571 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" event={"ID":"157ac17b-8d0d-4474-82c1-01ee73434ad5","Type":"ContainerStarted","Data":"0e9364e6fbf57eefb917bb98bf0772dd2fcfb231e571aa96812ab6914a902623"} Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.544219 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qddsz" event={"ID":"d4eb4e8f-b419-490b-a0e5-026b87d06730","Type":"ContainerDied","Data":"b4f1e6e10339c49cb8f5e6509becaeec392e2c792d868dc28c2c14dbbe0f659c"} Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.544273 4685 scope.go:117] "RemoveContainer" containerID="17420fa015893ab8f8ec57c75d7251084d49657897e33ddf55cc46ca7154b9b7" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.544340 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qddsz" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.544526 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.562407 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 02 10:19:13 crc kubenswrapper[4685]: E1202 10:19:13.562846 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4eb4e8f-b419-490b-a0e5-026b87d06730" containerName="init" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.562868 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4eb4e8f-b419-490b-a0e5-026b87d06730" containerName="init" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.563071 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4eb4e8f-b419-490b-a0e5-026b87d06730" containerName="init" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.584539 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.586627 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.592455 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-wdkwn" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.592631 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.608246 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.608522 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.652934 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.665206 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qddsz"] Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.670775 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8a509f21-bfbc-4240-be77-1d5ca83344cf-cache\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.670845 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8a509f21-bfbc-4240-be77-1d5ca83344cf-lock\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.671663 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssns2\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-kube-api-access-ssns2\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.671723 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-swift\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.671786 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.677934 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qddsz"] Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.773364 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8a509f21-bfbc-4240-be77-1d5ca83344cf-cache\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.773684 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8a509f21-bfbc-4240-be77-1d5ca83344cf-lock\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.773749 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssns2\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-kube-api-access-ssns2\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.773798 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.773843 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8a509f21-bfbc-4240-be77-1d5ca83344cf-cache\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.773852 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.774121 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.774960 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8a509f21-bfbc-4240-be77-1d5ca83344cf-lock\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 
crc kubenswrapper[4685]: E1202 10:19:13.775330 4685 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 10:19:13 crc kubenswrapper[4685]: E1202 10:19:13.775347 4685 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 10:19:13 crc kubenswrapper[4685]: E1202 10:19:13.775380 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift podName:8a509f21-bfbc-4240-be77-1d5ca83344cf nodeName:}" failed. No retries permitted until 2025-12-02 10:19:14.275365434 +0000 UTC m=+1046.647139588 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift") pod "swift-storage-0" (UID: "8a509f21-bfbc-4240-be77-1d5ca83344cf") : configmap "swift-ring-files" not found Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.790243 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssns2\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-kube-api-access-ssns2\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.793276 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.828690 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.864631 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.910029 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4eb4e8f-b419-490b-a0e5-026b87d06730" path="/var/lib/kubelet/pods/d4eb4e8f-b419-490b-a0e5-026b87d06730/volumes" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.943610 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.951497 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-t2vpz"] Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.987586 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-n9xtt"] Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.988763 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:13 crc kubenswrapper[4685]: I1202 10:19:13.994475 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.011900 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-n9xtt"] Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.106946 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-nbrzh"] Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.107909 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.111243 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.111504 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.113195 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.132700 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-98qz8"] Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.133696 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.137205 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.154179 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-nbrzh"] Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.168976 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-98qz8"] Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.190338 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpknz\" (UniqueName: \"kubernetes.io/projected/4ec4e0fd-aabb-435c-943f-3c6192679c7f-kube-api-access-cpknz\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.190499 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-config\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.190798 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.190906 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294357 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12ca7abe-455f-4bfc-9da9-420462c92e69-config\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294404 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ca7abe-455f-4bfc-9da9-420462c92e69-combined-ca-bundle\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294438 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln8qw\" (UniqueName: \"kubernetes.io/projected/12ca7abe-455f-4bfc-9da9-420462c92e69-kube-api-access-ln8qw\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294487 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpknz\" (UniqueName: \"kubernetes.io/projected/4ec4e0fd-aabb-435c-943f-3c6192679c7f-kube-api-access-cpknz\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294516 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-dispersionconf\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294534 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-config\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294551 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/12ca7abe-455f-4bfc-9da9-420462c92e69-ovs-rundir\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294592 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-ring-data-devices\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294627 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0b83b134-73c4-447e-99a2-a49c814e589c-etc-swift\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294654 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294673 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294689 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-combined-ca-bundle\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294705 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294724 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9djk\" (UniqueName: \"kubernetes.io/projected/0b83b134-73c4-447e-99a2-a49c814e589c-kube-api-access-b9djk\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294747 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ca7abe-455f-4bfc-9da9-420462c92e69-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294766 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-scripts\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294804 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/12ca7abe-455f-4bfc-9da9-420462c92e69-ovn-rundir\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.294829 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-swiftconf\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: E1202 10:19:14.294981 4685 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 10:19:14 crc kubenswrapper[4685]: E1202 10:19:14.295000 4685 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 10:19:14 crc kubenswrapper[4685]: E1202 10:19:14.295037 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift podName:8a509f21-bfbc-4240-be77-1d5ca83344cf nodeName:}" failed. No retries permitted until 2025-12-02 10:19:15.295022354 +0000 UTC m=+1047.666796498 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift") pod "swift-storage-0" (UID: "8a509f21-bfbc-4240-be77-1d5ca83344cf") : configmap "swift-ring-files" not found Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.295418 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-config\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.296018 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-dns-svc\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.297207 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.332759 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpknz\" (UniqueName: \"kubernetes.io/projected/4ec4e0fd-aabb-435c-943f-3c6192679c7f-kube-api-access-cpknz\") pod \"dnsmasq-dns-74f6f696b9-n9xtt\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.339078 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.396738 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0b83b134-73c4-447e-99a2-a49c814e589c-etc-swift\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.396818 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-combined-ca-bundle\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.396853 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9djk\" (UniqueName: \"kubernetes.io/projected/0b83b134-73c4-447e-99a2-a49c814e589c-kube-api-access-b9djk\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.396886 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ca7abe-455f-4bfc-9da9-420462c92e69-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.396921 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-scripts\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.396965 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/12ca7abe-455f-4bfc-9da9-420462c92e69-ovn-rundir\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.396998 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-swiftconf\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.397039 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12ca7abe-455f-4bfc-9da9-420462c92e69-config\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.397069 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ca7abe-455f-4bfc-9da9-420462c92e69-combined-ca-bundle\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc 
kubenswrapper[4685]: I1202 10:19:14.397098 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln8qw\" (UniqueName: \"kubernetes.io/projected/12ca7abe-455f-4bfc-9da9-420462c92e69-kube-api-access-ln8qw\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.397147 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-dispersionconf\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.397179 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/12ca7abe-455f-4bfc-9da9-420462c92e69-ovs-rundir\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.397230 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-ring-data-devices\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.397580 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/12ca7abe-455f-4bfc-9da9-420462c92e69-ovn-rundir\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.397999 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0b83b134-73c4-447e-99a2-a49c814e589c-etc-swift\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.400027 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-scripts\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.400648 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-swiftconf\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.400811 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12ca7abe-455f-4bfc-9da9-420462c92e69-config\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.402216 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: 
\"kubernetes.io/host-path/12ca7abe-455f-4bfc-9da9-420462c92e69-ovs-rundir\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.402745 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-ring-data-devices\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.405622 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-dispersionconf\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.406340 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/12ca7abe-455f-4bfc-9da9-420462c92e69-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.407761 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-combined-ca-bundle\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.413744 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12ca7abe-455f-4bfc-9da9-420462c92e69-combined-ca-bundle\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.454010 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9djk\" (UniqueName: \"kubernetes.io/projected/0b83b134-73c4-447e-99a2-a49c814e589c-kube-api-access-b9djk\") pod \"swift-ring-rebalance-nbrzh\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.463118 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-fwh8c"] Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.467308 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln8qw\" (UniqueName: \"kubernetes.io/projected/12ca7abe-455f-4bfc-9da9-420462c92e69-kube-api-access-ln8qw\") pod \"ovn-controller-metrics-98qz8\" (UID: \"12ca7abe-455f-4bfc-9da9-420462c92e69\") " pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.497946 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-dns-svc\") pod \"c6865118-2f52-4465-b1f7-173ac10698c7\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.498037 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"kube-api-access-x7znv\" (UniqueName: \"kubernetes.io/projected/c6865118-2f52-4465-b1f7-173ac10698c7-kube-api-access-x7znv\") pod \"c6865118-2f52-4465-b1f7-173ac10698c7\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.498076 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-config\") pod \"c6865118-2f52-4465-b1f7-173ac10698c7\" (UID: \"c6865118-2f52-4465-b1f7-173ac10698c7\") " Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.505935 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6865118-2f52-4465-b1f7-173ac10698c7-kube-api-access-x7znv" (OuterVolumeSpecName: "kube-api-access-x7znv") pod "c6865118-2f52-4465-b1f7-173ac10698c7" (UID: "c6865118-2f52-4465-b1f7-173ac10698c7"). InnerVolumeSpecName "kube-api-access-x7znv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.521316 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-4z2pd"] Dec 02 10:19:14 crc kubenswrapper[4685]: E1202 10:19:14.521785 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6865118-2f52-4465-b1f7-173ac10698c7" containerName="init" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.521808 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6865118-2f52-4465-b1f7-173ac10698c7" containerName="init" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.521998 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6865118-2f52-4465-b1f7-173ac10698c7" containerName="init" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.523865 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.529135 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.548744 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-4z2pd"] Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.570863 4685 generic.go:334] "Generic (PLEG): container finished" podID="157ac17b-8d0d-4474-82c1-01ee73434ad5" containerID="786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035" exitCode=0 Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.570925 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" event={"ID":"157ac17b-8d0d-4474-82c1-01ee73434ad5","Type":"ContainerDied","Data":"786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035"} Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.587376 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" event={"ID":"c6865118-2f52-4465-b1f7-173ac10698c7","Type":"ContainerDied","Data":"ba97c4d1a84c08669d429381c9639b8a92362c63d2d8e490488bb8a46b068925"} Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.587427 4685 scope.go:117] "RemoveContainer" containerID="2f9ac289008f0b229698686942c8200b77a2439e1bd17384918fcf477c23de96" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.588030 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-t2vpz" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.605183 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.608435 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.608486 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-config\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.608513 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.608579 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4z95\" (UniqueName: \"kubernetes.io/projected/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-kube-api-access-b4z95\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.608611 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-dns-svc\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.608751 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7znv\" (UniqueName: \"kubernetes.io/projected/c6865118-2f52-4465-b1f7-173ac10698c7-kube-api-access-x7znv\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.621432 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-config" (OuterVolumeSpecName: "config") pod "c6865118-2f52-4465-b1f7-173ac10698c7" (UID: "c6865118-2f52-4465-b1f7-173ac10698c7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.636432 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c6865118-2f52-4465-b1f7-173ac10698c7" (UID: "c6865118-2f52-4465-b1f7-173ac10698c7"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.713047 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.713144 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-config\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.713179 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.713322 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4z95\" (UniqueName: \"kubernetes.io/projected/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-kube-api-access-b4z95\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.713361 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-dns-svc\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.713486 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.713510 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6865118-2f52-4465-b1f7-173ac10698c7-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.715142 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-config\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.715672 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.716801 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: 
\"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.716888 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-dns-svc\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.723991 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.747761 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-98qz8" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.760548 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4z95\" (UniqueName: \"kubernetes.io/projected/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-kube-api-access-b4z95\") pod \"dnsmasq-dns-698758b865-4z2pd\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.865083 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.918759 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.962311 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 02 10:19:14 crc kubenswrapper[4685]: I1202 10:19:14.996990 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-t2vpz"] Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.054738 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-t2vpz"] Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.123743 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-n9xtt"] Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.202625 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-nbrzh"] Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.335443 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.336601 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-98qz8"] Dec 02 10:19:15 crc kubenswrapper[4685]: E1202 10:19:15.336738 4685 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 10:19:15 crc kubenswrapper[4685]: E1202 10:19:15.336757 4685 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 10:19:15 crc kubenswrapper[4685]: E1202 10:19:15.336796 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift podName:8a509f21-bfbc-4240-be77-1d5ca83344cf 
nodeName:}" failed. No retries permitted until 2025-12-02 10:19:17.336782401 +0000 UTC m=+1049.708556555 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift") pod "swift-storage-0" (UID: "8a509f21-bfbc-4240-be77-1d5ca83344cf") : configmap "swift-ring-files" not found Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.605434 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-4z2pd"] Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.618452 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-98qz8" event={"ID":"12ca7abe-455f-4bfc-9da9-420462c92e69","Type":"ContainerStarted","Data":"a76fc2095b214dd88cd487f19fec4f65c0e009b61df5d70e28ebd9bcd834ef48"} Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.619354 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nbrzh" event={"ID":"0b83b134-73c4-447e-99a2-a49c814e589c","Type":"ContainerStarted","Data":"727e151e24975d16a2d4dbd1d44ad16e9e74f57c07cfa73aeb3421f782135142"} Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.627713 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" event={"ID":"4ec4e0fd-aabb-435c-943f-3c6192679c7f","Type":"ContainerStarted","Data":"5db13560880bf00bf26615af0e1786bde1fa09d7f1f7c365856236f84c8d98e8"} Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.639740 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" podUID="157ac17b-8d0d-4474-82c1-01ee73434ad5" containerName="dnsmasq-dns" containerID="cri-o://604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a" gracePeriod=10 Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.640960 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" event={"ID":"157ac17b-8d0d-4474-82c1-01ee73434ad5","Type":"ContainerStarted","Data":"604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a"} Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.665079 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" podStartSLOduration=3.665063516 podStartE2EDuration="3.665063516s" podCreationTimestamp="2025-12-02 10:19:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:15.663452242 +0000 UTC m=+1048.035226396" watchObservedRunningTime="2025-12-02 10:19:15.665063516 +0000 UTC m=+1048.036837670" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.695849 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.876169 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.878322 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.882849 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-q2gft" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.883107 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.883654 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.902144 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.940574 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6865118-2f52-4465-b1f7-173ac10698c7" path="/var/lib/kubelet/pods/c6865118-2f52-4465-b1f7-173ac10698c7/volumes" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.941445 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.950873 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.950938 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.950969 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-config\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.950983 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.951036 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4ncq\" (UniqueName: \"kubernetes.io/projected/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-kube-api-access-p4ncq\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.951073 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:15 crc kubenswrapper[4685]: I1202 10:19:15.951108 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-scripts\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.053905 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.053982 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.054010 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.054030 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-config\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.054096 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4ncq\" (UniqueName: \"kubernetes.io/projected/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-kube-api-access-p4ncq\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.054155 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.054201 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-scripts\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.055121 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-scripts\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.059396 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-config\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.065362 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.065646 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.070237 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.073367 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.079842 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4ncq\" (UniqueName: \"kubernetes.io/projected/a8e44cc8-073c-4db3-af8b-c6b18bb2c808-kube-api-access-p4ncq\") pod \"ovn-northd-0\" (UID: \"a8e44cc8-073c-4db3-af8b-c6b18bb2c808\") " pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.145192 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.203030 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.263768 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jcgk\" (UniqueName: \"kubernetes.io/projected/157ac17b-8d0d-4474-82c1-01ee73434ad5-kube-api-access-5jcgk\") pod \"157ac17b-8d0d-4474-82c1-01ee73434ad5\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.263978 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-config\") pod \"157ac17b-8d0d-4474-82c1-01ee73434ad5\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.264026 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-dns-svc\") pod \"157ac17b-8d0d-4474-82c1-01ee73434ad5\" (UID: \"157ac17b-8d0d-4474-82c1-01ee73434ad5\") " Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.270064 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/157ac17b-8d0d-4474-82c1-01ee73434ad5-kube-api-access-5jcgk" (OuterVolumeSpecName: "kube-api-access-5jcgk") pod "157ac17b-8d0d-4474-82c1-01ee73434ad5" (UID: "157ac17b-8d0d-4474-82c1-01ee73434ad5"). InnerVolumeSpecName "kube-api-access-5jcgk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.305487 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "157ac17b-8d0d-4474-82c1-01ee73434ad5" (UID: "157ac17b-8d0d-4474-82c1-01ee73434ad5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.326281 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-config" (OuterVolumeSpecName: "config") pod "157ac17b-8d0d-4474-82c1-01ee73434ad5" (UID: "157ac17b-8d0d-4474-82c1-01ee73434ad5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.367718 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jcgk\" (UniqueName: \"kubernetes.io/projected/157ac17b-8d0d-4474-82c1-01ee73434ad5-kube-api-access-5jcgk\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.368648 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.368661 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/157ac17b-8d0d-4474-82c1-01ee73434ad5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.659679 4685 generic.go:334] "Generic (PLEG): container finished" podID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerID="2c8f1cc614cfe626b938571a3f94e9d411ab779506a85c9914cba88c1da5d0ab" exitCode=0 Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.659775 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-4z2pd" event={"ID":"c0fc2dca-b50f-47c7-b772-e4426cfdda3e","Type":"ContainerDied","Data":"2c8f1cc614cfe626b938571a3f94e9d411ab779506a85c9914cba88c1da5d0ab"} Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.659823 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-4z2pd" event={"ID":"c0fc2dca-b50f-47c7-b772-e4426cfdda3e","Type":"ContainerStarted","Data":"cd3c6ed87da3b0d1c93a5e70b46a8f9d8033a76231b29f1ec51ccf1efeb85fba"} Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.661827 4685 generic.go:334] "Generic (PLEG): container finished" podID="4ec4e0fd-aabb-435c-943f-3c6192679c7f" containerID="8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f" exitCode=0 Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.661891 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" event={"ID":"4ec4e0fd-aabb-435c-943f-3c6192679c7f","Type":"ContainerDied","Data":"8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f"} Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.665208 4685 generic.go:334] "Generic (PLEG): container finished" podID="157ac17b-8d0d-4474-82c1-01ee73434ad5" containerID="604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a" exitCode=0 Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.665320 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.665319 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" event={"ID":"157ac17b-8d0d-4474-82c1-01ee73434ad5","Type":"ContainerDied","Data":"604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a"} Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.665398 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-fwh8c" event={"ID":"157ac17b-8d0d-4474-82c1-01ee73434ad5","Type":"ContainerDied","Data":"0e9364e6fbf57eefb917bb98bf0772dd2fcfb231e571aa96812ab6914a902623"} Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.665422 4685 scope.go:117] "RemoveContainer" containerID="604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.682661 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-98qz8" event={"ID":"12ca7abe-455f-4bfc-9da9-420462c92e69","Type":"ContainerStarted","Data":"3d7c96a8b6e412ddbf871664f555b505132df5af78d7fd900717a8ccb34eac0d"} Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.692579 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.737424 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-98qz8" podStartSLOduration=2.737405533 podStartE2EDuration="2.737405533s" podCreationTimestamp="2025-12-02 10:19:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:16.733076544 +0000 UTC m=+1049.104850698" watchObservedRunningTime="2025-12-02 10:19:16.737405533 +0000 UTC m=+1049.109179687" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.933862 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-fwh8c"] Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.936622 4685 scope.go:117] "RemoveContainer" containerID="786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.940577 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-fwh8c"] Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.978880 4685 scope.go:117] "RemoveContainer" containerID="604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a" Dec 02 10:19:16 crc kubenswrapper[4685]: E1202 10:19:16.985373 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a\": container with ID starting with 604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a not found: ID does not exist" containerID="604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.985402 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a"} err="failed to get container status \"604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a\": rpc error: code = NotFound desc = could not find container \"604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a\": container with ID starting with 
604264ae0b736d1744222a529a8f5f722f2ffb5920667c8dccb6b28f1d8e7d8a not found: ID does not exist" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.985422 4685 scope.go:117] "RemoveContainer" containerID="786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035" Dec 02 10:19:16 crc kubenswrapper[4685]: E1202 10:19:16.985936 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035\": container with ID starting with 786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035 not found: ID does not exist" containerID="786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035" Dec 02 10:19:16 crc kubenswrapper[4685]: I1202 10:19:16.985976 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035"} err="failed to get container status \"786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035\": rpc error: code = NotFound desc = could not find container \"786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035\": container with ID starting with 786a6c8837d8e87f4adc45b52a9da5293c326914e98e000ac6a949b0c8608035 not found: ID does not exist" Dec 02 10:19:17 crc kubenswrapper[4685]: I1202 10:19:17.394241 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:17 crc kubenswrapper[4685]: E1202 10:19:17.394443 4685 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 10:19:17 crc kubenswrapper[4685]: E1202 10:19:17.394473 4685 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 10:19:17 crc kubenswrapper[4685]: E1202 10:19:17.394591 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift podName:8a509f21-bfbc-4240-be77-1d5ca83344cf nodeName:}" failed. No retries permitted until 2025-12-02 10:19:21.394517687 +0000 UTC m=+1053.766291841 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift") pod "swift-storage-0" (UID: "8a509f21-bfbc-4240-be77-1d5ca83344cf") : configmap "swift-ring-files" not found Dec 02 10:19:17 crc kubenswrapper[4685]: I1202 10:19:17.688172 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a8e44cc8-073c-4db3-af8b-c6b18bb2c808","Type":"ContainerStarted","Data":"ad85076a450e90ebf3078b7d4d7db093f54501f5838f5bdda589d53c3b7bbb37"} Dec 02 10:19:17 crc kubenswrapper[4685]: I1202 10:19:17.690809 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-4z2pd" event={"ID":"c0fc2dca-b50f-47c7-b772-e4426cfdda3e","Type":"ContainerStarted","Data":"d91d88942af04a2dbe44f8ded4030504a1a921f7253df6947bafaba58dac1f0d"} Dec 02 10:19:17 crc kubenswrapper[4685]: I1202 10:19:17.690902 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:17 crc kubenswrapper[4685]: I1202 10:19:17.692820 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" event={"ID":"4ec4e0fd-aabb-435c-943f-3c6192679c7f","Type":"ContainerStarted","Data":"536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857"} Dec 02 10:19:17 crc kubenswrapper[4685]: I1202 10:19:17.692938 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:17 crc kubenswrapper[4685]: I1202 10:19:17.716491 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-4z2pd" podStartSLOduration=3.716470258 podStartE2EDuration="3.716470258s" podCreationTimestamp="2025-12-02 10:19:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:17.710799003 +0000 UTC m=+1050.082573167" watchObservedRunningTime="2025-12-02 10:19:17.716470258 +0000 UTC m=+1050.088244422" Dec 02 10:19:17 crc kubenswrapper[4685]: I1202 10:19:17.751441 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" podStartSLOduration=4.751420989 podStartE2EDuration="4.751420989s" podCreationTimestamp="2025-12-02 10:19:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:17.744695714 +0000 UTC m=+1050.116469888" watchObservedRunningTime="2025-12-02 10:19:17.751420989 +0000 UTC m=+1050.123195143" Dec 02 10:19:17 crc kubenswrapper[4685]: I1202 10:19:17.912528 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="157ac17b-8d0d-4474-82c1-01ee73434ad5" path="/var/lib/kubelet/pods/157ac17b-8d0d-4474-82c1-01ee73434ad5/volumes" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.634993 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7827-account-create-update-5p2bw"] Dec 02 10:19:19 crc kubenswrapper[4685]: E1202 10:19:19.636342 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157ac17b-8d0d-4474-82c1-01ee73434ad5" containerName="init" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.636674 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="157ac17b-8d0d-4474-82c1-01ee73434ad5" containerName="init" Dec 02 10:19:19 crc kubenswrapper[4685]: E1202 10:19:19.636739 4685 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157ac17b-8d0d-4474-82c1-01ee73434ad5" containerName="dnsmasq-dns" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.636793 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="157ac17b-8d0d-4474-82c1-01ee73434ad5" containerName="dnsmasq-dns" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.637393 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="157ac17b-8d0d-4474-82c1-01ee73434ad5" containerName="dnsmasq-dns" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.638046 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7827-account-create-update-5p2bw" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.642436 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.644936 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-brv75"] Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.646264 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-brv75" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.652200 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7827-account-create-update-5p2bw"] Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.682757 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-brv75"] Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.743995 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6e3d77d5-8563-42ce-b25a-527cf438fa69-operator-scripts\") pod \"keystone-db-create-brv75\" (UID: \"6e3d77d5-8563-42ce-b25a-527cf438fa69\") " pod="openstack/keystone-db-create-brv75" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.744048 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3da7f8da-6317-414b-8c60-be3c86223e70-operator-scripts\") pod \"keystone-7827-account-create-update-5p2bw\" (UID: \"3da7f8da-6317-414b-8c60-be3c86223e70\") " pod="openstack/keystone-7827-account-create-update-5p2bw" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.744105 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hth9\" (UniqueName: \"kubernetes.io/projected/3da7f8da-6317-414b-8c60-be3c86223e70-kube-api-access-9hth9\") pod \"keystone-7827-account-create-update-5p2bw\" (UID: \"3da7f8da-6317-414b-8c60-be3c86223e70\") " pod="openstack/keystone-7827-account-create-update-5p2bw" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.744153 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mddcf\" (UniqueName: \"kubernetes.io/projected/6e3d77d5-8563-42ce-b25a-527cf438fa69-kube-api-access-mddcf\") pod \"keystone-db-create-brv75\" (UID: \"6e3d77d5-8563-42ce-b25a-527cf438fa69\") " pod="openstack/keystone-db-create-brv75" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.772766 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-64lh8"] Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.773786 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-64lh8" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.794368 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-64lh8"] Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.845876 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mddcf\" (UniqueName: \"kubernetes.io/projected/6e3d77d5-8563-42ce-b25a-527cf438fa69-kube-api-access-mddcf\") pod \"keystone-db-create-brv75\" (UID: \"6e3d77d5-8563-42ce-b25a-527cf438fa69\") " pod="openstack/keystone-db-create-brv75" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.845970 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94326915-eb21-493c-a1cb-38dee42578ca-operator-scripts\") pod \"placement-db-create-64lh8\" (UID: \"94326915-eb21-493c-a1cb-38dee42578ca\") " pod="openstack/placement-db-create-64lh8" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.846056 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6e3d77d5-8563-42ce-b25a-527cf438fa69-operator-scripts\") pod \"keystone-db-create-brv75\" (UID: \"6e3d77d5-8563-42ce-b25a-527cf438fa69\") " pod="openstack/keystone-db-create-brv75" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.846098 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3da7f8da-6317-414b-8c60-be3c86223e70-operator-scripts\") pod \"keystone-7827-account-create-update-5p2bw\" (UID: \"3da7f8da-6317-414b-8c60-be3c86223e70\") " pod="openstack/keystone-7827-account-create-update-5p2bw" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.846173 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnr8h\" (UniqueName: \"kubernetes.io/projected/94326915-eb21-493c-a1cb-38dee42578ca-kube-api-access-hnr8h\") pod \"placement-db-create-64lh8\" (UID: \"94326915-eb21-493c-a1cb-38dee42578ca\") " pod="openstack/placement-db-create-64lh8" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.846206 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hth9\" (UniqueName: \"kubernetes.io/projected/3da7f8da-6317-414b-8c60-be3c86223e70-kube-api-access-9hth9\") pod \"keystone-7827-account-create-update-5p2bw\" (UID: \"3da7f8da-6317-414b-8c60-be3c86223e70\") " pod="openstack/keystone-7827-account-create-update-5p2bw" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.847292 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6e3d77d5-8563-42ce-b25a-527cf438fa69-operator-scripts\") pod \"keystone-db-create-brv75\" (UID: \"6e3d77d5-8563-42ce-b25a-527cf438fa69\") " pod="openstack/keystone-db-create-brv75" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.847539 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3da7f8da-6317-414b-8c60-be3c86223e70-operator-scripts\") pod \"keystone-7827-account-create-update-5p2bw\" (UID: \"3da7f8da-6317-414b-8c60-be3c86223e70\") " pod="openstack/keystone-7827-account-create-update-5p2bw" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.891471 4685 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/placement-70ae-account-create-update-hrxr2"] Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.892294 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mddcf\" (UniqueName: \"kubernetes.io/projected/6e3d77d5-8563-42ce-b25a-527cf438fa69-kube-api-access-mddcf\") pod \"keystone-db-create-brv75\" (UID: \"6e3d77d5-8563-42ce-b25a-527cf438fa69\") " pod="openstack/keystone-db-create-brv75" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.892544 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-70ae-account-create-update-hrxr2" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.895824 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hth9\" (UniqueName: \"kubernetes.io/projected/3da7f8da-6317-414b-8c60-be3c86223e70-kube-api-access-9hth9\") pod \"keystone-7827-account-create-update-5p2bw\" (UID: \"3da7f8da-6317-414b-8c60-be3c86223e70\") " pod="openstack/keystone-7827-account-create-update-5p2bw" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.902750 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.913194 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-70ae-account-create-update-hrxr2"] Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.947469 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a98f0fc5-1d50-448e-8694-1ce6a21302ad-operator-scripts\") pod \"placement-70ae-account-create-update-hrxr2\" (UID: \"a98f0fc5-1d50-448e-8694-1ce6a21302ad\") " pod="openstack/placement-70ae-account-create-update-hrxr2" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.947542 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnr8h\" (UniqueName: \"kubernetes.io/projected/94326915-eb21-493c-a1cb-38dee42578ca-kube-api-access-hnr8h\") pod \"placement-db-create-64lh8\" (UID: \"94326915-eb21-493c-a1cb-38dee42578ca\") " pod="openstack/placement-db-create-64lh8" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.947684 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98m5k\" (UniqueName: \"kubernetes.io/projected/a98f0fc5-1d50-448e-8694-1ce6a21302ad-kube-api-access-98m5k\") pod \"placement-70ae-account-create-update-hrxr2\" (UID: \"a98f0fc5-1d50-448e-8694-1ce6a21302ad\") " pod="openstack/placement-70ae-account-create-update-hrxr2" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.947713 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94326915-eb21-493c-a1cb-38dee42578ca-operator-scripts\") pod \"placement-db-create-64lh8\" (UID: \"94326915-eb21-493c-a1cb-38dee42578ca\") " pod="openstack/placement-db-create-64lh8" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.948357 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94326915-eb21-493c-a1cb-38dee42578ca-operator-scripts\") pod \"placement-db-create-64lh8\" (UID: \"94326915-eb21-493c-a1cb-38dee42578ca\") " pod="openstack/placement-db-create-64lh8" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.964494 
4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7827-account-create-update-5p2bw" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.968422 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnr8h\" (UniqueName: \"kubernetes.io/projected/94326915-eb21-493c-a1cb-38dee42578ca-kube-api-access-hnr8h\") pod \"placement-db-create-64lh8\" (UID: \"94326915-eb21-493c-a1cb-38dee42578ca\") " pod="openstack/placement-db-create-64lh8" Dec 02 10:19:19 crc kubenswrapper[4685]: I1202 10:19:19.977227 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-brv75" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.049337 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98m5k\" (UniqueName: \"kubernetes.io/projected/a98f0fc5-1d50-448e-8694-1ce6a21302ad-kube-api-access-98m5k\") pod \"placement-70ae-account-create-update-hrxr2\" (UID: \"a98f0fc5-1d50-448e-8694-1ce6a21302ad\") " pod="openstack/placement-70ae-account-create-update-hrxr2" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.049530 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a98f0fc5-1d50-448e-8694-1ce6a21302ad-operator-scripts\") pod \"placement-70ae-account-create-update-hrxr2\" (UID: \"a98f0fc5-1d50-448e-8694-1ce6a21302ad\") " pod="openstack/placement-70ae-account-create-update-hrxr2" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.050761 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a98f0fc5-1d50-448e-8694-1ce6a21302ad-operator-scripts\") pod \"placement-70ae-account-create-update-hrxr2\" (UID: \"a98f0fc5-1d50-448e-8694-1ce6a21302ad\") " pod="openstack/placement-70ae-account-create-update-hrxr2" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.066753 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98m5k\" (UniqueName: \"kubernetes.io/projected/a98f0fc5-1d50-448e-8694-1ce6a21302ad-kube-api-access-98m5k\") pod \"placement-70ae-account-create-update-hrxr2\" (UID: \"a98f0fc5-1d50-448e-8694-1ce6a21302ad\") " pod="openstack/placement-70ae-account-create-update-hrxr2" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.091621 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-64lh8" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.157922 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-bl2km"] Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.158938 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-bl2km" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.165671 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-bl2km"] Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.253234 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf664d50-8650-418f-b0d8-f6c03236e6c9-operator-scripts\") pod \"glance-db-create-bl2km\" (UID: \"bf664d50-8650-418f-b0d8-f6c03236e6c9\") " pod="openstack/glance-db-create-bl2km" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.253405 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blqjs\" (UniqueName: \"kubernetes.io/projected/bf664d50-8650-418f-b0d8-f6c03236e6c9-kube-api-access-blqjs\") pod \"glance-db-create-bl2km\" (UID: \"bf664d50-8650-418f-b0d8-f6c03236e6c9\") " pod="openstack/glance-db-create-bl2km" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.255299 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-70ae-account-create-update-hrxr2" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.313549 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-24e3-account-create-update-qgb5r"] Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.314789 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-24e3-account-create-update-qgb5r" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.319075 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.327741 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-24e3-account-create-update-qgb5r"] Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.355193 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwsg2\" (UniqueName: \"kubernetes.io/projected/ce382e1f-af3a-4231-95f4-8122d70297ac-kube-api-access-kwsg2\") pod \"glance-24e3-account-create-update-qgb5r\" (UID: \"ce382e1f-af3a-4231-95f4-8122d70297ac\") " pod="openstack/glance-24e3-account-create-update-qgb5r" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.355265 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blqjs\" (UniqueName: \"kubernetes.io/projected/bf664d50-8650-418f-b0d8-f6c03236e6c9-kube-api-access-blqjs\") pod \"glance-db-create-bl2km\" (UID: \"bf664d50-8650-418f-b0d8-f6c03236e6c9\") " pod="openstack/glance-db-create-bl2km" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.355322 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce382e1f-af3a-4231-95f4-8122d70297ac-operator-scripts\") pod \"glance-24e3-account-create-update-qgb5r\" (UID: \"ce382e1f-af3a-4231-95f4-8122d70297ac\") " pod="openstack/glance-24e3-account-create-update-qgb5r" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.355408 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf664d50-8650-418f-b0d8-f6c03236e6c9-operator-scripts\") pod \"glance-db-create-bl2km\" (UID: \"bf664d50-8650-418f-b0d8-f6c03236e6c9\") " 
pod="openstack/glance-db-create-bl2km" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.356300 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf664d50-8650-418f-b0d8-f6c03236e6c9-operator-scripts\") pod \"glance-db-create-bl2km\" (UID: \"bf664d50-8650-418f-b0d8-f6c03236e6c9\") " pod="openstack/glance-db-create-bl2km" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.371902 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blqjs\" (UniqueName: \"kubernetes.io/projected/bf664d50-8650-418f-b0d8-f6c03236e6c9-kube-api-access-blqjs\") pod \"glance-db-create-bl2km\" (UID: \"bf664d50-8650-418f-b0d8-f6c03236e6c9\") " pod="openstack/glance-db-create-bl2km" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.456936 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwsg2\" (UniqueName: \"kubernetes.io/projected/ce382e1f-af3a-4231-95f4-8122d70297ac-kube-api-access-kwsg2\") pod \"glance-24e3-account-create-update-qgb5r\" (UID: \"ce382e1f-af3a-4231-95f4-8122d70297ac\") " pod="openstack/glance-24e3-account-create-update-qgb5r" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.457015 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce382e1f-af3a-4231-95f4-8122d70297ac-operator-scripts\") pod \"glance-24e3-account-create-update-qgb5r\" (UID: \"ce382e1f-af3a-4231-95f4-8122d70297ac\") " pod="openstack/glance-24e3-account-create-update-qgb5r" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.457806 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce382e1f-af3a-4231-95f4-8122d70297ac-operator-scripts\") pod \"glance-24e3-account-create-update-qgb5r\" (UID: \"ce382e1f-af3a-4231-95f4-8122d70297ac\") " pod="openstack/glance-24e3-account-create-update-qgb5r" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.475289 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwsg2\" (UniqueName: \"kubernetes.io/projected/ce382e1f-af3a-4231-95f4-8122d70297ac-kube-api-access-kwsg2\") pod \"glance-24e3-account-create-update-qgb5r\" (UID: \"ce382e1f-af3a-4231-95f4-8122d70297ac\") " pod="openstack/glance-24e3-account-create-update-qgb5r" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.476645 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-bl2km" Dec 02 10:19:20 crc kubenswrapper[4685]: I1202 10:19:20.630761 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-24e3-account-create-update-qgb5r" Dec 02 10:19:21 crc kubenswrapper[4685]: I1202 10:19:21.477085 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:21 crc kubenswrapper[4685]: E1202 10:19:21.477206 4685 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 10:19:21 crc kubenswrapper[4685]: E1202 10:19:21.477384 4685 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 10:19:21 crc kubenswrapper[4685]: E1202 10:19:21.477436 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift podName:8a509f21-bfbc-4240-be77-1d5ca83344cf nodeName:}" failed. No retries permitted until 2025-12-02 10:19:29.477420467 +0000 UTC m=+1061.849194611 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift") pod "swift-storage-0" (UID: "8a509f21-bfbc-4240-be77-1d5ca83344cf") : configmap "swift-ring-files" not found Dec 02 10:19:22 crc kubenswrapper[4685]: I1202 10:19:22.565908 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-64lh8"] Dec 02 10:19:22 crc kubenswrapper[4685]: I1202 10:19:22.587746 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-bl2km"] Dec 02 10:19:22 crc kubenswrapper[4685]: I1202 10:19:22.601801 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-brv75"] Dec 02 10:19:22 crc kubenswrapper[4685]: W1202 10:19:22.605735 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e3d77d5_8563_42ce_b25a_527cf438fa69.slice/crio-50853eefd452e006b1dc55cf7bfd388a01f424be9bc51aaef005317260f59596 WatchSource:0}: Error finding container 50853eefd452e006b1dc55cf7bfd388a01f424be9bc51aaef005317260f59596: Status 404 returned error can't find the container with id 50853eefd452e006b1dc55cf7bfd388a01f424be9bc51aaef005317260f59596 Dec 02 10:19:22 crc kubenswrapper[4685]: I1202 10:19:22.612107 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7827-account-create-update-5p2bw"] Dec 02 10:19:22 crc kubenswrapper[4685]: W1202 10:19:22.612595 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3da7f8da_6317_414b_8c60_be3c86223e70.slice/crio-e1ebdf73fe995de933dff4645b0084d30a24b050f6d8ae297c80b5715d1f5a10 WatchSource:0}: Error finding container e1ebdf73fe995de933dff4645b0084d30a24b050f6d8ae297c80b5715d1f5a10: Status 404 returned error can't find the container with id e1ebdf73fe995de933dff4645b0084d30a24b050f6d8ae297c80b5715d1f5a10 Dec 02 10:19:22 crc kubenswrapper[4685]: I1202 10:19:22.621441 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-70ae-account-create-update-hrxr2"] Dec 02 10:19:22 crc kubenswrapper[4685]: I1202 10:19:22.759117 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-24e3-account-create-update-qgb5r"] Dec 02 10:19:22 
crc kubenswrapper[4685]: I1202 10:19:22.765397 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7827-account-create-update-5p2bw" event={"ID":"3da7f8da-6317-414b-8c60-be3c86223e70","Type":"ContainerStarted","Data":"e1ebdf73fe995de933dff4645b0084d30a24b050f6d8ae297c80b5715d1f5a10"} Dec 02 10:19:22 crc kubenswrapper[4685]: I1202 10:19:22.766869 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-70ae-account-create-update-hrxr2" event={"ID":"a98f0fc5-1d50-448e-8694-1ce6a21302ad","Type":"ContainerStarted","Data":"9d26b012dc0af5fb18c4e95a013aaa21f49a2d20855403591bee9bc685c961cf"} Dec 02 10:19:22 crc kubenswrapper[4685]: W1202 10:19:22.767402 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce382e1f_af3a_4231_95f4_8122d70297ac.slice/crio-1b7b8d141a3a9aba687319c2456f4191839626eb5b7264aa64551917ef6a0704 WatchSource:0}: Error finding container 1b7b8d141a3a9aba687319c2456f4191839626eb5b7264aa64551917ef6a0704: Status 404 returned error can't find the container with id 1b7b8d141a3a9aba687319c2456f4191839626eb5b7264aa64551917ef6a0704 Dec 02 10:19:22 crc kubenswrapper[4685]: I1202 10:19:22.769696 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-64lh8" event={"ID":"94326915-eb21-493c-a1cb-38dee42578ca","Type":"ContainerStarted","Data":"6910cc340b7a2bd22a1a18d14b012dbf06ea28d878320a5ac4f7beb0be11c678"} Dec 02 10:19:22 crc kubenswrapper[4685]: I1202 10:19:22.771921 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-brv75" event={"ID":"6e3d77d5-8563-42ce-b25a-527cf438fa69","Type":"ContainerStarted","Data":"50853eefd452e006b1dc55cf7bfd388a01f424be9bc51aaef005317260f59596"} Dec 02 10:19:22 crc kubenswrapper[4685]: I1202 10:19:22.774328 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-bl2km" event={"ID":"bf664d50-8650-418f-b0d8-f6c03236e6c9","Type":"ContainerStarted","Data":"192e1f050ba4a4ab7893a9fc0c4a9af3a9ad6ab4cae6cd8175682389bec818f8"} Dec 02 10:19:23 crc kubenswrapper[4685]: I1202 10:19:23.782095 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-24e3-account-create-update-qgb5r" event={"ID":"ce382e1f-af3a-4231-95f4-8122d70297ac","Type":"ContainerStarted","Data":"1b7b8d141a3a9aba687319c2456f4191839626eb5b7264aa64551917ef6a0704"} Dec 02 10:19:24 crc kubenswrapper[4685]: I1202 10:19:24.610870 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:24 crc kubenswrapper[4685]: I1202 10:19:24.920760 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:19:24 crc kubenswrapper[4685]: I1202 10:19:24.983122 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-n9xtt"] Dec 02 10:19:24 crc kubenswrapper[4685]: I1202 10:19:24.983349 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" podUID="4ec4e0fd-aabb-435c-943f-3c6192679c7f" containerName="dnsmasq-dns" containerID="cri-o://536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857" gracePeriod=10 Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.771012 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.804972 4685 generic.go:334] "Generic (PLEG): container finished" podID="4ec4e0fd-aabb-435c-943f-3c6192679c7f" containerID="536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857" exitCode=0 Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.805035 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" event={"ID":"4ec4e0fd-aabb-435c-943f-3c6192679c7f","Type":"ContainerDied","Data":"536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.805059 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" event={"ID":"4ec4e0fd-aabb-435c-943f-3c6192679c7f","Type":"ContainerDied","Data":"5db13560880bf00bf26615af0e1786bde1fa09d7f1f7c365856236f84c8d98e8"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.805076 4685 scope.go:117] "RemoveContainer" containerID="536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.805081 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6f696b9-n9xtt" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.807231 4685 generic.go:334] "Generic (PLEG): container finished" podID="6e3d77d5-8563-42ce-b25a-527cf438fa69" containerID="1c8170a7bf07f7bf644aca68fb270154b24a0876d6f85a7872056351c91363c6" exitCode=0 Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.807316 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-brv75" event={"ID":"6e3d77d5-8563-42ce-b25a-527cf438fa69","Type":"ContainerDied","Data":"1c8170a7bf07f7bf644aca68fb270154b24a0876d6f85a7872056351c91363c6"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.811717 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a8e44cc8-073c-4db3-af8b-c6b18bb2c808","Type":"ContainerStarted","Data":"97ecb4486e5f904c622bf34bb128bb426d2756ef0d0dc70ec28ede77bb873643"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.811774 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a8e44cc8-073c-4db3-af8b-c6b18bb2c808","Type":"ContainerStarted","Data":"678053251ae6219024e15a62c3f410272be198829d3b50c6230e139ec53dca29"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.812926 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.820847 4685 generic.go:334] "Generic (PLEG): container finished" podID="94326915-eb21-493c-a1cb-38dee42578ca" containerID="c5c2e78d234ea9e2062dc810ec8723c7782c74817ffd92acbe83318d29c3e86e" exitCode=0 Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.820941 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-64lh8" event={"ID":"94326915-eb21-493c-a1cb-38dee42578ca","Type":"ContainerDied","Data":"c5c2e78d234ea9e2062dc810ec8723c7782c74817ffd92acbe83318d29c3e86e"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.837826 4685 generic.go:334] "Generic (PLEG): container finished" podID="a98f0fc5-1d50-448e-8694-1ce6a21302ad" containerID="7b3ae08e4e0f9264eac218d2f2c8a6bb473c9af26db10218c6e7669987670d77" exitCode=0 Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.838031 4685 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/placement-70ae-account-create-update-hrxr2" event={"ID":"a98f0fc5-1d50-448e-8694-1ce6a21302ad","Type":"ContainerDied","Data":"7b3ae08e4e0f9264eac218d2f2c8a6bb473c9af26db10218c6e7669987670d77"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.851038 4685 scope.go:117] "RemoveContainer" containerID="8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.856508 4685 generic.go:334] "Generic (PLEG): container finished" podID="ce382e1f-af3a-4231-95f4-8122d70297ac" containerID="f03bb2d5974a9ea93123ffcea6573116bd80c2e7058d5eb8b453114de11137f7" exitCode=0 Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.856658 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-24e3-account-create-update-qgb5r" event={"ID":"ce382e1f-af3a-4231-95f4-8122d70297ac","Type":"ContainerDied","Data":"f03bb2d5974a9ea93123ffcea6573116bd80c2e7058d5eb8b453114de11137f7"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.876732 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=6.598275677 podStartE2EDuration="11.876712815s" podCreationTimestamp="2025-12-02 10:19:15 +0000 UTC" firstStartedPulling="2025-12-02 10:19:16.712697575 +0000 UTC m=+1049.084471729" lastFinishedPulling="2025-12-02 10:19:21.991134703 +0000 UTC m=+1054.362908867" observedRunningTime="2025-12-02 10:19:26.865013164 +0000 UTC m=+1059.236787308" watchObservedRunningTime="2025-12-02 10:19:26.876712815 +0000 UTC m=+1059.248486969" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.877788 4685 generic.go:334] "Generic (PLEG): container finished" podID="bf664d50-8650-418f-b0d8-f6c03236e6c9" containerID="25880f54744e792bc87dda25b37ca667ed2a9b608bd3f6c6c6c6b7926c5a552d" exitCode=0 Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.877946 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-bl2km" event={"ID":"bf664d50-8650-418f-b0d8-f6c03236e6c9","Type":"ContainerDied","Data":"25880f54744e792bc87dda25b37ca667ed2a9b608bd3f6c6c6c6b7926c5a552d"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.880384 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nbrzh" event={"ID":"0b83b134-73c4-447e-99a2-a49c814e589c","Type":"ContainerStarted","Data":"06a053c806a6d498fa275040d5cc1e0f62e56d6cf0d43283e71e951c53ddba0d"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.882073 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-config\") pod \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.882146 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-ovsdbserver-nb\") pod \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.882309 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpknz\" (UniqueName: \"kubernetes.io/projected/4ec4e0fd-aabb-435c-943f-3c6192679c7f-kube-api-access-cpknz\") pod \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " Dec 02 
10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.882360 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-dns-svc\") pod \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\" (UID: \"4ec4e0fd-aabb-435c-943f-3c6192679c7f\") " Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.893742 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ec4e0fd-aabb-435c-943f-3c6192679c7f-kube-api-access-cpknz" (OuterVolumeSpecName: "kube-api-access-cpknz") pod "4ec4e0fd-aabb-435c-943f-3c6192679c7f" (UID: "4ec4e0fd-aabb-435c-943f-3c6192679c7f"). InnerVolumeSpecName "kube-api-access-cpknz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.896163 4685 generic.go:334] "Generic (PLEG): container finished" podID="3da7f8da-6317-414b-8c60-be3c86223e70" containerID="00c3596965ba2329d97f84cc30b9051d1b4f5c41ac1ef187899c2ab6be6e9c65" exitCode=0 Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.896286 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7827-account-create-update-5p2bw" event={"ID":"3da7f8da-6317-414b-8c60-be3c86223e70","Type":"ContainerDied","Data":"00c3596965ba2329d97f84cc30b9051d1b4f5c41ac1ef187899c2ab6be6e9c65"} Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.896545 4685 scope.go:117] "RemoveContainer" containerID="536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857" Dec 02 10:19:26 crc kubenswrapper[4685]: E1202 10:19:26.897115 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857\": container with ID starting with 536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857 not found: ID does not exist" containerID="536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.897220 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857"} err="failed to get container status \"536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857\": rpc error: code = NotFound desc = could not find container \"536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857\": container with ID starting with 536490e8cd7e0da318ac1e0fb6eecd713f10d710c67ac86694ae10ef8ad6c857 not found: ID does not exist" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.897301 4685 scope.go:117] "RemoveContainer" containerID="8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f" Dec 02 10:19:26 crc kubenswrapper[4685]: E1202 10:19:26.897778 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f\": container with ID starting with 8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f not found: ID does not exist" containerID="8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.897856 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f"} err="failed to get container status 
\"8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f\": rpc error: code = NotFound desc = could not find container \"8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f\": container with ID starting with 8b8021b9c3d7f8d1026b12baa9defdfe0a45bfba22f30047822915181ed82a9f not found: ID does not exist" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.963330 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4ec4e0fd-aabb-435c-943f-3c6192679c7f" (UID: "4ec4e0fd-aabb-435c-943f-3c6192679c7f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.964926 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-nbrzh" podStartSLOduration=6.225794237 podStartE2EDuration="12.964909907s" podCreationTimestamp="2025-12-02 10:19:14 +0000 UTC" firstStartedPulling="2025-12-02 10:19:15.249145195 +0000 UTC m=+1047.620919349" lastFinishedPulling="2025-12-02 10:19:21.988260865 +0000 UTC m=+1054.360035019" observedRunningTime="2025-12-02 10:19:26.937039672 +0000 UTC m=+1059.308813826" watchObservedRunningTime="2025-12-02 10:19:26.964909907 +0000 UTC m=+1059.336684061" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.973112 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4ec4e0fd-aabb-435c-943f-3c6192679c7f" (UID: "4ec4e0fd-aabb-435c-943f-3c6192679c7f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.973247 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-config" (OuterVolumeSpecName: "config") pod "4ec4e0fd-aabb-435c-943f-3c6192679c7f" (UID: "4ec4e0fd-aabb-435c-943f-3c6192679c7f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.984368 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpknz\" (UniqueName: \"kubernetes.io/projected/4ec4e0fd-aabb-435c-943f-3c6192679c7f-kube-api-access-cpknz\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.984403 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.984413 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:26 crc kubenswrapper[4685]: I1202 10:19:26.984421 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4ec4e0fd-aabb-435c-943f-3c6192679c7f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:27 crc kubenswrapper[4685]: I1202 10:19:27.142394 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-n9xtt"] Dec 02 10:19:27 crc kubenswrapper[4685]: I1202 10:19:27.149972 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6f696b9-n9xtt"] Dec 02 10:19:27 crc kubenswrapper[4685]: I1202 10:19:27.911614 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ec4e0fd-aabb-435c-943f-3c6192679c7f" path="/var/lib/kubelet/pods/4ec4e0fd-aabb-435c-943f-3c6192679c7f/volumes" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.340902 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-24e3-account-create-update-qgb5r" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.412654 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce382e1f-af3a-4231-95f4-8122d70297ac-operator-scripts\") pod \"ce382e1f-af3a-4231-95f4-8122d70297ac\" (UID: \"ce382e1f-af3a-4231-95f4-8122d70297ac\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.412715 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwsg2\" (UniqueName: \"kubernetes.io/projected/ce382e1f-af3a-4231-95f4-8122d70297ac-kube-api-access-kwsg2\") pod \"ce382e1f-af3a-4231-95f4-8122d70297ac\" (UID: \"ce382e1f-af3a-4231-95f4-8122d70297ac\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.418379 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce382e1f-af3a-4231-95f4-8122d70297ac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ce382e1f-af3a-4231-95f4-8122d70297ac" (UID: "ce382e1f-af3a-4231-95f4-8122d70297ac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.427239 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce382e1f-af3a-4231-95f4-8122d70297ac-kube-api-access-kwsg2" (OuterVolumeSpecName: "kube-api-access-kwsg2") pod "ce382e1f-af3a-4231-95f4-8122d70297ac" (UID: "ce382e1f-af3a-4231-95f4-8122d70297ac"). InnerVolumeSpecName "kube-api-access-kwsg2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.516006 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce382e1f-af3a-4231-95f4-8122d70297ac-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.516056 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwsg2\" (UniqueName: \"kubernetes.io/projected/ce382e1f-af3a-4231-95f4-8122d70297ac-kube-api-access-kwsg2\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.561774 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-64lh8" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.571453 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-70ae-account-create-update-hrxr2" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.588549 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-brv75" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.599805 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-bl2km" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.617495 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6e3d77d5-8563-42ce-b25a-527cf438fa69-operator-scripts\") pod \"6e3d77d5-8563-42ce-b25a-527cf438fa69\" (UID: \"6e3d77d5-8563-42ce-b25a-527cf438fa69\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.617621 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnr8h\" (UniqueName: \"kubernetes.io/projected/94326915-eb21-493c-a1cb-38dee42578ca-kube-api-access-hnr8h\") pod \"94326915-eb21-493c-a1cb-38dee42578ca\" (UID: \"94326915-eb21-493c-a1cb-38dee42578ca\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.617669 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a98f0fc5-1d50-448e-8694-1ce6a21302ad-operator-scripts\") pod \"a98f0fc5-1d50-448e-8694-1ce6a21302ad\" (UID: \"a98f0fc5-1d50-448e-8694-1ce6a21302ad\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.617717 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mddcf\" (UniqueName: \"kubernetes.io/projected/6e3d77d5-8563-42ce-b25a-527cf438fa69-kube-api-access-mddcf\") pod \"6e3d77d5-8563-42ce-b25a-527cf438fa69\" (UID: \"6e3d77d5-8563-42ce-b25a-527cf438fa69\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.617812 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98m5k\" (UniqueName: \"kubernetes.io/projected/a98f0fc5-1d50-448e-8694-1ce6a21302ad-kube-api-access-98m5k\") pod \"a98f0fc5-1d50-448e-8694-1ce6a21302ad\" (UID: \"a98f0fc5-1d50-448e-8694-1ce6a21302ad\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.617851 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94326915-eb21-493c-a1cb-38dee42578ca-operator-scripts\") pod \"94326915-eb21-493c-a1cb-38dee42578ca\" (UID: \"94326915-eb21-493c-a1cb-38dee42578ca\") " Dec 
02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.618856 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94326915-eb21-493c-a1cb-38dee42578ca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "94326915-eb21-493c-a1cb-38dee42578ca" (UID: "94326915-eb21-493c-a1cb-38dee42578ca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.619276 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e3d77d5-8563-42ce-b25a-527cf438fa69-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6e3d77d5-8563-42ce-b25a-527cf438fa69" (UID: "6e3d77d5-8563-42ce-b25a-527cf438fa69"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.619978 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7827-account-create-update-5p2bw" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.621039 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a98f0fc5-1d50-448e-8694-1ce6a21302ad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a98f0fc5-1d50-448e-8694-1ce6a21302ad" (UID: "a98f0fc5-1d50-448e-8694-1ce6a21302ad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.628524 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a98f0fc5-1d50-448e-8694-1ce6a21302ad-kube-api-access-98m5k" (OuterVolumeSpecName: "kube-api-access-98m5k") pod "a98f0fc5-1d50-448e-8694-1ce6a21302ad" (UID: "a98f0fc5-1d50-448e-8694-1ce6a21302ad"). InnerVolumeSpecName "kube-api-access-98m5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.628597 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94326915-eb21-493c-a1cb-38dee42578ca-kube-api-access-hnr8h" (OuterVolumeSpecName: "kube-api-access-hnr8h") pod "94326915-eb21-493c-a1cb-38dee42578ca" (UID: "94326915-eb21-493c-a1cb-38dee42578ca"). InnerVolumeSpecName "kube-api-access-hnr8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.632703 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e3d77d5-8563-42ce-b25a-527cf438fa69-kube-api-access-mddcf" (OuterVolumeSpecName: "kube-api-access-mddcf") pod "6e3d77d5-8563-42ce-b25a-527cf438fa69" (UID: "6e3d77d5-8563-42ce-b25a-527cf438fa69"). InnerVolumeSpecName "kube-api-access-mddcf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.719299 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blqjs\" (UniqueName: \"kubernetes.io/projected/bf664d50-8650-418f-b0d8-f6c03236e6c9-kube-api-access-blqjs\") pod \"bf664d50-8650-418f-b0d8-f6c03236e6c9\" (UID: \"bf664d50-8650-418f-b0d8-f6c03236e6c9\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.719472 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3da7f8da-6317-414b-8c60-be3c86223e70-operator-scripts\") pod \"3da7f8da-6317-414b-8c60-be3c86223e70\" (UID: \"3da7f8da-6317-414b-8c60-be3c86223e70\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.719591 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf664d50-8650-418f-b0d8-f6c03236e6c9-operator-scripts\") pod \"bf664d50-8650-418f-b0d8-f6c03236e6c9\" (UID: \"bf664d50-8650-418f-b0d8-f6c03236e6c9\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.719623 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hth9\" (UniqueName: \"kubernetes.io/projected/3da7f8da-6317-414b-8c60-be3c86223e70-kube-api-access-9hth9\") pod \"3da7f8da-6317-414b-8c60-be3c86223e70\" (UID: \"3da7f8da-6317-414b-8c60-be3c86223e70\") " Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.720002 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf664d50-8650-418f-b0d8-f6c03236e6c9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bf664d50-8650-418f-b0d8-f6c03236e6c9" (UID: "bf664d50-8650-418f-b0d8-f6c03236e6c9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.720015 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3da7f8da-6317-414b-8c60-be3c86223e70-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3da7f8da-6317-414b-8c60-be3c86223e70" (UID: "3da7f8da-6317-414b-8c60-be3c86223e70"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.720594 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3da7f8da-6317-414b-8c60-be3c86223e70-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.720620 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnr8h\" (UniqueName: \"kubernetes.io/projected/94326915-eb21-493c-a1cb-38dee42578ca-kube-api-access-hnr8h\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.720636 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a98f0fc5-1d50-448e-8694-1ce6a21302ad-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.720648 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mddcf\" (UniqueName: \"kubernetes.io/projected/6e3d77d5-8563-42ce-b25a-527cf438fa69-kube-api-access-mddcf\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.720660 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf664d50-8650-418f-b0d8-f6c03236e6c9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.720673 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98m5k\" (UniqueName: \"kubernetes.io/projected/a98f0fc5-1d50-448e-8694-1ce6a21302ad-kube-api-access-98m5k\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.720687 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94326915-eb21-493c-a1cb-38dee42578ca-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.720700 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6e3d77d5-8563-42ce-b25a-527cf438fa69-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.721744 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf664d50-8650-418f-b0d8-f6c03236e6c9-kube-api-access-blqjs" (OuterVolumeSpecName: "kube-api-access-blqjs") pod "bf664d50-8650-418f-b0d8-f6c03236e6c9" (UID: "bf664d50-8650-418f-b0d8-f6c03236e6c9"). InnerVolumeSpecName "kube-api-access-blqjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.723220 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3da7f8da-6317-414b-8c60-be3c86223e70-kube-api-access-9hth9" (OuterVolumeSpecName: "kube-api-access-9hth9") pod "3da7f8da-6317-414b-8c60-be3c86223e70" (UID: "3da7f8da-6317-414b-8c60-be3c86223e70"). InnerVolumeSpecName "kube-api-access-9hth9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.823749 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hth9\" (UniqueName: \"kubernetes.io/projected/3da7f8da-6317-414b-8c60-be3c86223e70-kube-api-access-9hth9\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.823798 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blqjs\" (UniqueName: \"kubernetes.io/projected/bf664d50-8650-418f-b0d8-f6c03236e6c9-kube-api-access-blqjs\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.918508 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-bl2km" event={"ID":"bf664d50-8650-418f-b0d8-f6c03236e6c9","Type":"ContainerDied","Data":"192e1f050ba4a4ab7893a9fc0c4a9af3a9ad6ab4cae6cd8175682389bec818f8"} Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.918571 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="192e1f050ba4a4ab7893a9fc0c4a9af3a9ad6ab4cae6cd8175682389bec818f8" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.918642 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-bl2km" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.928159 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7827-account-create-update-5p2bw" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.928178 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7827-account-create-update-5p2bw" event={"ID":"3da7f8da-6317-414b-8c60-be3c86223e70","Type":"ContainerDied","Data":"e1ebdf73fe995de933dff4645b0084d30a24b050f6d8ae297c80b5715d1f5a10"} Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.928648 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1ebdf73fe995de933dff4645b0084d30a24b050f6d8ae297c80b5715d1f5a10" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.936236 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-70ae-account-create-update-hrxr2" event={"ID":"a98f0fc5-1d50-448e-8694-1ce6a21302ad","Type":"ContainerDied","Data":"9d26b012dc0af5fb18c4e95a013aaa21f49a2d20855403591bee9bc685c961cf"} Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.936310 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-70ae-account-create-update-hrxr2" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.936331 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d26b012dc0af5fb18c4e95a013aaa21f49a2d20855403591bee9bc685c961cf" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.940975 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-64lh8" event={"ID":"94326915-eb21-493c-a1cb-38dee42578ca","Type":"ContainerDied","Data":"6910cc340b7a2bd22a1a18d14b012dbf06ea28d878320a5ac4f7beb0be11c678"} Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.941006 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6910cc340b7a2bd22a1a18d14b012dbf06ea28d878320a5ac4f7beb0be11c678" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.941063 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-64lh8" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.943173 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-brv75" event={"ID":"6e3d77d5-8563-42ce-b25a-527cf438fa69","Type":"ContainerDied","Data":"50853eefd452e006b1dc55cf7bfd388a01f424be9bc51aaef005317260f59596"} Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.943210 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50853eefd452e006b1dc55cf7bfd388a01f424be9bc51aaef005317260f59596" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.943307 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-brv75" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.948787 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-24e3-account-create-update-qgb5r" Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.948818 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-24e3-account-create-update-qgb5r" event={"ID":"ce382e1f-af3a-4231-95f4-8122d70297ac","Type":"ContainerDied","Data":"1b7b8d141a3a9aba687319c2456f4191839626eb5b7264aa64551917ef6a0704"} Dec 02 10:19:28 crc kubenswrapper[4685]: I1202 10:19:28.949087 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b7b8d141a3a9aba687319c2456f4191839626eb5b7264aa64551917ef6a0704" Dec 02 10:19:29 crc kubenswrapper[4685]: I1202 10:19:29.534279 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:29 crc kubenswrapper[4685]: E1202 10:19:29.534639 4685 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 02 10:19:29 crc kubenswrapper[4685]: E1202 10:19:29.534657 4685 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 02 10:19:29 crc kubenswrapper[4685]: E1202 10:19:29.534703 4685 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift podName:8a509f21-bfbc-4240-be77-1d5ca83344cf nodeName:}" failed. No retries permitted until 2025-12-02 10:19:45.534686605 +0000 UTC m=+1077.906460759 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift") pod "swift-storage-0" (UID: "8a509f21-bfbc-4240-be77-1d5ca83344cf") : configmap "swift-ring-files" not found Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.760538 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-lwcqt"] Dec 02 10:19:30 crc kubenswrapper[4685]: E1202 10:19:30.760974 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf664d50-8650-418f-b0d8-f6c03236e6c9" containerName="mariadb-database-create" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.760991 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf664d50-8650-418f-b0d8-f6c03236e6c9" containerName="mariadb-database-create" Dec 02 10:19:30 crc kubenswrapper[4685]: E1202 10:19:30.761012 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a98f0fc5-1d50-448e-8694-1ce6a21302ad" containerName="mariadb-account-create-update" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761020 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a98f0fc5-1d50-448e-8694-1ce6a21302ad" containerName="mariadb-account-create-update" Dec 02 10:19:30 crc kubenswrapper[4685]: E1202 10:19:30.761032 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ec4e0fd-aabb-435c-943f-3c6192679c7f" containerName="init" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761041 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ec4e0fd-aabb-435c-943f-3c6192679c7f" containerName="init" Dec 02 10:19:30 crc kubenswrapper[4685]: E1202 10:19:30.761059 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e3d77d5-8563-42ce-b25a-527cf438fa69" containerName="mariadb-database-create" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761067 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e3d77d5-8563-42ce-b25a-527cf438fa69" containerName="mariadb-database-create" Dec 02 10:19:30 crc kubenswrapper[4685]: E1202 10:19:30.761086 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94326915-eb21-493c-a1cb-38dee42578ca" containerName="mariadb-database-create" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761093 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="94326915-eb21-493c-a1cb-38dee42578ca" containerName="mariadb-database-create" Dec 02 10:19:30 crc kubenswrapper[4685]: E1202 10:19:30.761104 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ec4e0fd-aabb-435c-943f-3c6192679c7f" containerName="dnsmasq-dns" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761113 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ec4e0fd-aabb-435c-943f-3c6192679c7f" containerName="dnsmasq-dns" Dec 02 10:19:30 crc kubenswrapper[4685]: E1202 10:19:30.761124 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da7f8da-6317-414b-8c60-be3c86223e70" containerName="mariadb-account-create-update" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761133 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da7f8da-6317-414b-8c60-be3c86223e70" containerName="mariadb-account-create-update" Dec 02 10:19:30 crc kubenswrapper[4685]: E1202 10:19:30.761144 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce382e1f-af3a-4231-95f4-8122d70297ac" containerName="mariadb-account-create-update" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761152 4685 
state_mem.go:107] "Deleted CPUSet assignment" podUID="ce382e1f-af3a-4231-95f4-8122d70297ac" containerName="mariadb-account-create-update" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761341 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf664d50-8650-418f-b0d8-f6c03236e6c9" containerName="mariadb-database-create" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761354 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ec4e0fd-aabb-435c-943f-3c6192679c7f" containerName="dnsmasq-dns" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761364 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce382e1f-af3a-4231-95f4-8122d70297ac" containerName="mariadb-account-create-update" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761375 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e3d77d5-8563-42ce-b25a-527cf438fa69" containerName="mariadb-database-create" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761386 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da7f8da-6317-414b-8c60-be3c86223e70" containerName="mariadb-account-create-update" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761395 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a98f0fc5-1d50-448e-8694-1ce6a21302ad" containerName="mariadb-account-create-update" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.761417 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="94326915-eb21-493c-a1cb-38dee42578ca" containerName="mariadb-database-create" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.762139 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.764002 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.764002 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-dfpbr" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.771717 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-lwcqt"] Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.856935 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-db-sync-config-data\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.857547 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96v8n\" (UniqueName: \"kubernetes.io/projected/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-kube-api-access-96v8n\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.857873 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-config-data\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.857979 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-combined-ca-bundle\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.959306 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-config-data\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.959593 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-combined-ca-bundle\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.959720 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-db-sync-config-data\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.959821 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96v8n\" (UniqueName: \"kubernetes.io/projected/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-kube-api-access-96v8n\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.965975 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-config-data\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.968375 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-combined-ca-bundle\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.969410 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-db-sync-config-data\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:30 crc kubenswrapper[4685]: I1202 10:19:30.979219 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96v8n\" (UniqueName: \"kubernetes.io/projected/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-kube-api-access-96v8n\") pod \"glance-db-sync-lwcqt\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:31 crc kubenswrapper[4685]: I1202 10:19:31.093726 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:31 crc kubenswrapper[4685]: W1202 10:19:31.483586 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d2bb8b8_1101_4788_bb6e_2e80d7a0e84c.slice/crio-6531fdf3f4485740b662478489afc9de6faced568ec61e05ee15f4451e38bc76 WatchSource:0}: Error finding container 6531fdf3f4485740b662478489afc9de6faced568ec61e05ee15f4451e38bc76: Status 404 returned error can't find the container with id 6531fdf3f4485740b662478489afc9de6faced568ec61e05ee15f4451e38bc76 Dec 02 10:19:31 crc kubenswrapper[4685]: I1202 10:19:31.483961 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-lwcqt"] Dec 02 10:19:31 crc kubenswrapper[4685]: I1202 10:19:31.965989 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lwcqt" event={"ID":"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c","Type":"ContainerStarted","Data":"6531fdf3f4485740b662478489afc9de6faced568ec61e05ee15f4451e38bc76"} Dec 02 10:19:33 crc kubenswrapper[4685]: I1202 10:19:33.981867 4685 generic.go:334] "Generic (PLEG): container finished" podID="0b83b134-73c4-447e-99a2-a49c814e589c" containerID="06a053c806a6d498fa275040d5cc1e0f62e56d6cf0d43283e71e951c53ddba0d" exitCode=0 Dec 02 10:19:33 crc kubenswrapper[4685]: I1202 10:19:33.981964 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nbrzh" event={"ID":"0b83b134-73c4-447e-99a2-a49c814e589c","Type":"ContainerDied","Data":"06a053c806a6d498fa275040d5cc1e0f62e56d6cf0d43283e71e951c53ddba0d"} Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.346931 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-2sntp" podUID="ee7d3162-98e7-4af9-aad0-2098e23d1743" containerName="ovn-controller" probeResult="failure" output=< Dec 02 10:19:34 crc kubenswrapper[4685]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 02 10:19:34 crc kubenswrapper[4685]: > Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.363502 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.368341 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-c8mrc" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.596929 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-2sntp-config-tdkcr"] Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.598049 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.600658 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.647563 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2sntp-config-tdkcr"] Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.733399 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-additional-scripts\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.733478 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-log-ovn\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.733609 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.733651 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-scripts\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.733710 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n996t\" (UniqueName: \"kubernetes.io/projected/acbd9d0c-456a-4517-9d1a-14547d2b19ba-kube-api-access-n996t\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.733736 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run-ovn\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.835096 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n996t\" (UniqueName: \"kubernetes.io/projected/acbd9d0c-456a-4517-9d1a-14547d2b19ba-kube-api-access-n996t\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.835158 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run-ovn\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.835195 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-additional-scripts\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.835227 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-log-ovn\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.835268 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.835301 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-scripts\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.835742 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-log-ovn\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.835742 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run-ovn\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.835764 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.836132 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-additional-scripts\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.837431 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-scripts\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.864373 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n996t\" (UniqueName: \"kubernetes.io/projected/acbd9d0c-456a-4517-9d1a-14547d2b19ba-kube-api-access-n996t\") pod \"ovn-controller-2sntp-config-tdkcr\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:34 crc kubenswrapper[4685]: I1202 10:19:34.942245 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.402033 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.445055 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-combined-ca-bundle\") pod \"0b83b134-73c4-447e-99a2-a49c814e589c\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.445157 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-dispersionconf\") pod \"0b83b134-73c4-447e-99a2-a49c814e589c\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.445230 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-swiftconf\") pod \"0b83b134-73c4-447e-99a2-a49c814e589c\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.445317 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-ring-data-devices\") pod \"0b83b134-73c4-447e-99a2-a49c814e589c\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.445352 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9djk\" (UniqueName: \"kubernetes.io/projected/0b83b134-73c4-447e-99a2-a49c814e589c-kube-api-access-b9djk\") pod \"0b83b134-73c4-447e-99a2-a49c814e589c\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.445389 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-scripts\") pod \"0b83b134-73c4-447e-99a2-a49c814e589c\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.445470 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0b83b134-73c4-447e-99a2-a49c814e589c-etc-swift\") pod \"0b83b134-73c4-447e-99a2-a49c814e589c\" (UID: \"0b83b134-73c4-447e-99a2-a49c814e589c\") " Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.446156 4685 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "0b83b134-73c4-447e-99a2-a49c814e589c" (UID: "0b83b134-73c4-447e-99a2-a49c814e589c"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.447864 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b83b134-73c4-447e-99a2-a49c814e589c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0b83b134-73c4-447e-99a2-a49c814e589c" (UID: "0b83b134-73c4-447e-99a2-a49c814e589c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.452237 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b83b134-73c4-447e-99a2-a49c814e589c-kube-api-access-b9djk" (OuterVolumeSpecName: "kube-api-access-b9djk") pod "0b83b134-73c4-447e-99a2-a49c814e589c" (UID: "0b83b134-73c4-447e-99a2-a49c814e589c"). InnerVolumeSpecName "kube-api-access-b9djk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.457951 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "0b83b134-73c4-447e-99a2-a49c814e589c" (UID: "0b83b134-73c4-447e-99a2-a49c814e589c"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.472647 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "0b83b134-73c4-447e-99a2-a49c814e589c" (UID: "0b83b134-73c4-447e-99a2-a49c814e589c"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.480305 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b83b134-73c4-447e-99a2-a49c814e589c" (UID: "0b83b134-73c4-447e-99a2-a49c814e589c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.492379 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-scripts" (OuterVolumeSpecName: "scripts") pod "0b83b134-73c4-447e-99a2-a49c814e589c" (UID: "0b83b134-73c4-447e-99a2-a49c814e589c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.547468 4685 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0b83b134-73c4-447e-99a2-a49c814e589c-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.547611 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.547630 4685 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.547642 4685 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0b83b134-73c4-447e-99a2-a49c814e589c-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.547654 4685 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.547695 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9djk\" (UniqueName: \"kubernetes.io/projected/0b83b134-73c4-447e-99a2-a49c814e589c-kube-api-access-b9djk\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.547711 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b83b134-73c4-447e-99a2-a49c814e589c-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:35 crc kubenswrapper[4685]: I1202 10:19:35.570339 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2sntp-config-tdkcr"] Dec 02 10:19:35 crc kubenswrapper[4685]: W1202 10:19:35.580896 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacbd9d0c_456a_4517_9d1a_14547d2b19ba.slice/crio-e07f89cecc92b27695f05476bcf4e05796248e6a8015337f902826a633ef8b5e WatchSource:0}: Error finding container e07f89cecc92b27695f05476bcf4e05796248e6a8015337f902826a633ef8b5e: Status 404 returned error can't find the container with id e07f89cecc92b27695f05476bcf4e05796248e6a8015337f902826a633ef8b5e Dec 02 10:19:36 crc kubenswrapper[4685]: I1202 10:19:36.000792 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sntp-config-tdkcr" event={"ID":"acbd9d0c-456a-4517-9d1a-14547d2b19ba","Type":"ContainerStarted","Data":"37c7e57bf2150e4b0d10d79485673c6569b052aacfbc89676ebc71ae67d21d59"} Dec 02 10:19:36 crc kubenswrapper[4685]: I1202 10:19:36.001119 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sntp-config-tdkcr" event={"ID":"acbd9d0c-456a-4517-9d1a-14547d2b19ba","Type":"ContainerStarted","Data":"e07f89cecc92b27695f05476bcf4e05796248e6a8015337f902826a633ef8b5e"} Dec 02 10:19:36 crc kubenswrapper[4685]: I1202 10:19:36.002653 4685 generic.go:334] "Generic (PLEG): container finished" podID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" containerID="b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871" exitCode=0 Dec 02 10:19:36 crc 
kubenswrapper[4685]: I1202 10:19:36.002709 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e0366597-4ac4-482d-ba5f-dfa2956d1fb3","Type":"ContainerDied","Data":"b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871"} Dec 02 10:19:36 crc kubenswrapper[4685]: I1202 10:19:36.008456 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-nbrzh" event={"ID":"0b83b134-73c4-447e-99a2-a49c814e589c","Type":"ContainerDied","Data":"727e151e24975d16a2d4dbd1d44ad16e9e74f57c07cfa73aeb3421f782135142"} Dec 02 10:19:36 crc kubenswrapper[4685]: I1202 10:19:36.008503 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="727e151e24975d16a2d4dbd1d44ad16e9e74f57c07cfa73aeb3421f782135142" Dec 02 10:19:36 crc kubenswrapper[4685]: I1202 10:19:36.008586 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-nbrzh" Dec 02 10:19:36 crc kubenswrapper[4685]: I1202 10:19:36.038327 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-2sntp-config-tdkcr" podStartSLOduration=2.038305908 podStartE2EDuration="2.038305908s" podCreationTimestamp="2025-12-02 10:19:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:36.037131545 +0000 UTC m=+1068.408905699" watchObservedRunningTime="2025-12-02 10:19:36.038305908 +0000 UTC m=+1068.410080072" Dec 02 10:19:36 crc kubenswrapper[4685]: I1202 10:19:36.338651 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 02 10:19:37 crc kubenswrapper[4685]: I1202 10:19:37.017739 4685 generic.go:334] "Generic (PLEG): container finished" podID="acbd9d0c-456a-4517-9d1a-14547d2b19ba" containerID="37c7e57bf2150e4b0d10d79485673c6569b052aacfbc89676ebc71ae67d21d59" exitCode=0 Dec 02 10:19:37 crc kubenswrapper[4685]: I1202 10:19:37.018437 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sntp-config-tdkcr" event={"ID":"acbd9d0c-456a-4517-9d1a-14547d2b19ba","Type":"ContainerDied","Data":"37c7e57bf2150e4b0d10d79485673c6569b052aacfbc89676ebc71ae67d21d59"} Dec 02 10:19:37 crc kubenswrapper[4685]: I1202 10:19:37.021076 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e0366597-4ac4-482d-ba5f-dfa2956d1fb3","Type":"ContainerStarted","Data":"1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c"} Dec 02 10:19:37 crc kubenswrapper[4685]: I1202 10:19:37.021929 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:19:37 crc kubenswrapper[4685]: I1202 10:19:37.123988 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.524510521 podStartE2EDuration="1m12.12396172s" podCreationTimestamp="2025-12-02 10:18:25 +0000 UTC" firstStartedPulling="2025-12-02 10:18:27.74024511 +0000 UTC m=+1000.112019264" lastFinishedPulling="2025-12-02 10:19:02.339696309 +0000 UTC m=+1034.711470463" observedRunningTime="2025-12-02 10:19:37.109079942 +0000 UTC m=+1069.480854116" watchObservedRunningTime="2025-12-02 10:19:37.12396172 +0000 UTC m=+1069.495735874" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.450670 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.641010 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-additional-scripts\") pod \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.641316 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-scripts\") pod \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.641368 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run\") pod \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.641405 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n996t\" (UniqueName: \"kubernetes.io/projected/acbd9d0c-456a-4517-9d1a-14547d2b19ba-kube-api-access-n996t\") pod \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.641478 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run-ovn\") pod \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.641524 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-log-ovn\") pod \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\" (UID: \"acbd9d0c-456a-4517-9d1a-14547d2b19ba\") " Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.641697 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run" (OuterVolumeSpecName: "var-run") pod "acbd9d0c-456a-4517-9d1a-14547d2b19ba" (UID: "acbd9d0c-456a-4517-9d1a-14547d2b19ba"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.641747 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "acbd9d0c-456a-4517-9d1a-14547d2b19ba" (UID: "acbd9d0c-456a-4517-9d1a-14547d2b19ba"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.641868 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "acbd9d0c-456a-4517-9d1a-14547d2b19ba" (UID: "acbd9d0c-456a-4517-9d1a-14547d2b19ba"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.641994 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "acbd9d0c-456a-4517-9d1a-14547d2b19ba" (UID: "acbd9d0c-456a-4517-9d1a-14547d2b19ba"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.642035 4685 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.642216 4685 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.642298 4685 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.642637 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-scripts" (OuterVolumeSpecName: "scripts") pod "acbd9d0c-456a-4517-9d1a-14547d2b19ba" (UID: "acbd9d0c-456a-4517-9d1a-14547d2b19ba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.665187 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acbd9d0c-456a-4517-9d1a-14547d2b19ba-kube-api-access-n996t" (OuterVolumeSpecName: "kube-api-access-n996t") pod "acbd9d0c-456a-4517-9d1a-14547d2b19ba" (UID: "acbd9d0c-456a-4517-9d1a-14547d2b19ba"). InnerVolumeSpecName "kube-api-access-n996t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.743767 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acbd9d0c-456a-4517-9d1a-14547d2b19ba-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.744208 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n996t\" (UniqueName: \"kubernetes.io/projected/acbd9d0c-456a-4517-9d1a-14547d2b19ba-kube-api-access-n996t\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:38 crc kubenswrapper[4685]: I1202 10:19:38.744231 4685 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/acbd9d0c-456a-4517-9d1a-14547d2b19ba-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:39 crc kubenswrapper[4685]: I1202 10:19:39.057887 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sntp-config-tdkcr" event={"ID":"acbd9d0c-456a-4517-9d1a-14547d2b19ba","Type":"ContainerDied","Data":"e07f89cecc92b27695f05476bcf4e05796248e6a8015337f902826a633ef8b5e"} Dec 02 10:19:39 crc kubenswrapper[4685]: I1202 10:19:39.057933 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e07f89cecc92b27695f05476bcf4e05796248e6a8015337f902826a633ef8b5e" Dec 02 10:19:39 crc kubenswrapper[4685]: I1202 10:19:39.057998 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2sntp-config-tdkcr" Dec 02 10:19:39 crc kubenswrapper[4685]: I1202 10:19:39.149637 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-2sntp-config-tdkcr"] Dec 02 10:19:39 crc kubenswrapper[4685]: I1202 10:19:39.160715 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-2sntp-config-tdkcr"] Dec 02 10:19:39 crc kubenswrapper[4685]: I1202 10:19:39.377888 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-2sntp" Dec 02 10:19:39 crc kubenswrapper[4685]: I1202 10:19:39.910960 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acbd9d0c-456a-4517-9d1a-14547d2b19ba" path="/var/lib/kubelet/pods/acbd9d0c-456a-4517-9d1a-14547d2b19ba/volumes" Dec 02 10:19:42 crc kubenswrapper[4685]: I1202 10:19:42.147503 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:19:42 crc kubenswrapper[4685]: I1202 10:19:42.147821 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:19:45 crc kubenswrapper[4685]: I1202 10:19:45.106824 4685 generic.go:334] "Generic (PLEG): container finished" podID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" containerID="ab362a7a28d7c14f4bc121a5fe40bd1d254d7dfc824d5973af3c51bd6cc6e44b" exitCode=0 Dec 02 10:19:45 crc kubenswrapper[4685]: I1202 10:19:45.106907 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"72a08bf5-82d6-48e0-a188-c7ac46ba22b4","Type":"ContainerDied","Data":"ab362a7a28d7c14f4bc121a5fe40bd1d254d7dfc824d5973af3c51bd6cc6e44b"} Dec 02 10:19:45 crc kubenswrapper[4685]: I1202 10:19:45.562964 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:45 crc kubenswrapper[4685]: I1202 10:19:45.568947 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8a509f21-bfbc-4240-be77-1d5ca83344cf-etc-swift\") pod \"swift-storage-0\" (UID: \"8a509f21-bfbc-4240-be77-1d5ca83344cf\") " pod="openstack/swift-storage-0" Dec 02 10:19:45 crc kubenswrapper[4685]: I1202 10:19:45.766172 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 02 10:19:46 crc kubenswrapper[4685]: I1202 10:19:46.123461 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"72a08bf5-82d6-48e0-a188-c7ac46ba22b4","Type":"ContainerStarted","Data":"567e4b57d73123f06069c5a2696b211119e19f6aaf789819f4e76abe39df067b"} Dec 02 10:19:46 crc kubenswrapper[4685]: I1202 10:19:46.126869 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 02 10:19:46 crc kubenswrapper[4685]: I1202 10:19:46.152414 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371955.702377 podStartE2EDuration="1m21.152398378s" podCreationTimestamp="2025-12-02 10:18:25 +0000 UTC" firstStartedPulling="2025-12-02 10:18:27.404983473 +0000 UTC m=+999.776757627" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:46.149370465 +0000 UTC m=+1078.521144639" watchObservedRunningTime="2025-12-02 10:19:46.152398378 +0000 UTC m=+1078.524172532" Dec 02 10:19:46 crc kubenswrapper[4685]: I1202 10:19:46.374253 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 02 10:19:46 crc kubenswrapper[4685]: W1202 10:19:46.382683 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a509f21_bfbc_4240_be77_1d5ca83344cf.slice/crio-4e7888e63569d354c933a53859c874940057794c8db7f3a291bd61a07cc7baa4 WatchSource:0}: Error finding container 4e7888e63569d354c933a53859c874940057794c8db7f3a291bd61a07cc7baa4: Status 404 returned error can't find the container with id 4e7888e63569d354c933a53859c874940057794c8db7f3a291bd61a07cc7baa4 Dec 02 10:19:46 crc kubenswrapper[4685]: I1202 10:19:46.993791 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:19:47 crc kubenswrapper[4685]: I1202 10:19:47.133584 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lwcqt" event={"ID":"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c","Type":"ContainerStarted","Data":"882b3af711315c3461e618ab409db4a057393088741a46e2fecad8e14071c331"} Dec 02 10:19:47 crc kubenswrapper[4685]: I1202 10:19:47.135825 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"4e7888e63569d354c933a53859c874940057794c8db7f3a291bd61a07cc7baa4"} Dec 02 10:19:47 crc kubenswrapper[4685]: I1202 10:19:47.153614 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-lwcqt" podStartSLOduration=2.8209373170000003 podStartE2EDuration="17.153589131s" podCreationTimestamp="2025-12-02 10:19:30 +0000 UTC" firstStartedPulling="2025-12-02 10:19:31.486324008 +0000 UTC m=+1063.858098162" lastFinishedPulling="2025-12-02 10:19:45.818975822 +0000 UTC m=+1078.190749976" observedRunningTime="2025-12-02 10:19:47.146297251 +0000 UTC m=+1079.518071405" watchObservedRunningTime="2025-12-02 10:19:47.153589131 +0000 UTC m=+1079.525363305" Dec 02 10:19:48 crc kubenswrapper[4685]: I1202 10:19:48.199312 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"7683544a9104a1c1189c5696f0436e177e6a87c11cbceb65e75fbd0ca7543802"} Dec 02 10:19:48 crc kubenswrapper[4685]: I1202 10:19:48.200365 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"4d83745a615efec571f8894f98b531063639c1d28603bf74ef0555cf3bd4215f"} Dec 02 10:19:49 crc kubenswrapper[4685]: I1202 10:19:49.208696 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"b5029b9e89b001b71cb0fa29f53501560c086500036171eaa6ca54f706e64e1a"} Dec 02 10:19:49 crc kubenswrapper[4685]: I1202 10:19:49.208742 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"d5353ae7a17acec379763968a24764428f5e15b42bd8987af351780a61f0f1ba"} Dec 02 10:19:51 crc kubenswrapper[4685]: I1202 10:19:51.234803 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"70d5c8a6bfa063de441b1d6462e1941c19f57687823104603705af730d377da7"} Dec 02 10:19:51 crc kubenswrapper[4685]: I1202 10:19:51.236540 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"a0c35b58093052eedaf30eeb696137371a7764194738ea24b5a15724e97de07c"} Dec 02 10:19:51 crc kubenswrapper[4685]: I1202 10:19:51.236671 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"04c0574889bedd2adb9a1e0e73b59f267b0930d1f9ca58351b5b2f3554ee82da"} Dec 02 10:19:51 crc kubenswrapper[4685]: I1202 10:19:51.236776 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"a3d9151e6fbd1a1d48bede86f4856994f69f828839bb8ca036e7b0678feaed3b"} Dec 02 10:19:52 crc kubenswrapper[4685]: I1202 10:19:52.249592 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"f278f26983760d38b29027f0c372d7f3743c57b3952bbbf7f337fdc93070d38f"} Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 
10:19:53.266195 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"4ef25f01d421b5dc3ddf9353badc059777e89b9f107fa23d6c106ce8694ace56"} Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.266546 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"5e5c26863dd5f8a7666a931541745778b3b06020d549264e3d92f54a39808fb3"} Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.266581 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"fb225c76f0a4f3c4319e235f02b4ffbe7f38dafd0cacf48116ccb909caf9b719"} Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.266595 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"145ff6d8f56bd3c6ab2d6b86d81ff7a59c0ac773f64e1cd176747ddce7eb4421"} Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.266608 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"524fb3d976677345354619e3ff61a2af460bd2ee14b8ade89092ae9ecc56dd1f"} Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.266619 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"8a509f21-bfbc-4240-be77-1d5ca83344cf","Type":"ContainerStarted","Data":"d28b112b9f234b59764fa3b6596c8cdbf2ee159671f73c306089bf124c51be8d"} Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.313155 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=35.731980935 podStartE2EDuration="41.313127567s" podCreationTimestamp="2025-12-02 10:19:12 +0000 UTC" firstStartedPulling="2025-12-02 10:19:46.3847962 +0000 UTC m=+1078.756570354" lastFinishedPulling="2025-12-02 10:19:51.965942832 +0000 UTC m=+1084.337716986" observedRunningTime="2025-12-02 10:19:53.307518093 +0000 UTC m=+1085.679292267" watchObservedRunningTime="2025-12-02 10:19:53.313127567 +0000 UTC m=+1085.684901721" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.666840 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-j99xz"] Dec 02 10:19:53 crc kubenswrapper[4685]: E1202 10:19:53.667430 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acbd9d0c-456a-4517-9d1a-14547d2b19ba" containerName="ovn-config" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.667446 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="acbd9d0c-456a-4517-9d1a-14547d2b19ba" containerName="ovn-config" Dec 02 10:19:53 crc kubenswrapper[4685]: E1202 10:19:53.667458 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b83b134-73c4-447e-99a2-a49c814e589c" containerName="swift-ring-rebalance" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.667465 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b83b134-73c4-447e-99a2-a49c814e589c" containerName="swift-ring-rebalance" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.667667 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b83b134-73c4-447e-99a2-a49c814e589c" containerName="swift-ring-rebalance" Dec 02 
10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.667683 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="acbd9d0c-456a-4517-9d1a-14547d2b19ba" containerName="ovn-config" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.668493 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.670883 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.691488 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-j99xz"] Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.791490 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.791603 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.791645 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.791671 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s2p4\" (UniqueName: \"kubernetes.io/projected/393f5a5c-ca19-44e2-968c-b66c08df4684-kube-api-access-4s2p4\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.791699 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-config\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.791767 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-svc\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.896013 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-svc\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc 
kubenswrapper[4685]: I1202 10:19:53.896085 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.896143 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.896194 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.896221 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s2p4\" (UniqueName: \"kubernetes.io/projected/393f5a5c-ca19-44e2-968c-b66c08df4684-kube-api-access-4s2p4\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.896248 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-config\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.897099 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-svc\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.897138 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-config\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.897528 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.898242 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.900355 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.944040 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s2p4\" (UniqueName: \"kubernetes.io/projected/393f5a5c-ca19-44e2-968c-b66c08df4684-kube-api-access-4s2p4\") pod \"dnsmasq-dns-764c5664d7-j99xz\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:53 crc kubenswrapper[4685]: I1202 10:19:53.989475 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:54 crc kubenswrapper[4685]: I1202 10:19:54.512886 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-j99xz"] Dec 02 10:19:54 crc kubenswrapper[4685]: W1202 10:19:54.525518 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod393f5a5c_ca19_44e2_968c_b66c08df4684.slice/crio-ba812f4397e1b221ab190c9c082e0dbc8f7aa5e27783820abb66249b4477d420 WatchSource:0}: Error finding container ba812f4397e1b221ab190c9c082e0dbc8f7aa5e27783820abb66249b4477d420: Status 404 returned error can't find the container with id ba812f4397e1b221ab190c9c082e0dbc8f7aa5e27783820abb66249b4477d420 Dec 02 10:19:55 crc kubenswrapper[4685]: I1202 10:19:55.284674 4685 generic.go:334] "Generic (PLEG): container finished" podID="393f5a5c-ca19-44e2-968c-b66c08df4684" containerID="44a5fa4e31b8bb43b0072ed87079dd84f974cc09230f1a79dccf8cd802d3bf79" exitCode=0 Dec 02 10:19:55 crc kubenswrapper[4685]: I1202 10:19:55.284736 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" event={"ID":"393f5a5c-ca19-44e2-968c-b66c08df4684","Type":"ContainerDied","Data":"44a5fa4e31b8bb43b0072ed87079dd84f974cc09230f1a79dccf8cd802d3bf79"} Dec 02 10:19:55 crc kubenswrapper[4685]: I1202 10:19:55.284962 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" event={"ID":"393f5a5c-ca19-44e2-968c-b66c08df4684","Type":"ContainerStarted","Data":"ba812f4397e1b221ab190c9c082e0dbc8f7aa5e27783820abb66249b4477d420"} Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.293159 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" event={"ID":"393f5a5c-ca19-44e2-968c-b66c08df4684","Type":"ContainerStarted","Data":"e7ab0fb59822dc88c123ae641e6c2811473be3522ec0f64b97dcdedc37678b82"} Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.293463 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.298150 4685 generic.go:334] "Generic (PLEG): container finished" podID="5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c" containerID="882b3af711315c3461e618ab409db4a057393088741a46e2fecad8e14071c331" exitCode=0 Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.298213 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lwcqt" event={"ID":"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c","Type":"ContainerDied","Data":"882b3af711315c3461e618ab409db4a057393088741a46e2fecad8e14071c331"} Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.322009 4685 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" podStartSLOduration=3.321984552 podStartE2EDuration="3.321984552s" podCreationTimestamp="2025-12-02 10:19:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:56.319515634 +0000 UTC m=+1088.691289788" watchObservedRunningTime="2025-12-02 10:19:56.321984552 +0000 UTC m=+1088.693758716" Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.585847 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.905308 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-qhx5w"] Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.911078 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qhx5w" Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.931492 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-qhx5w"] Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.954632 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-operator-scripts\") pod \"cinder-db-create-qhx5w\" (UID: \"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf\") " pod="openstack/cinder-db-create-qhx5w" Dec 02 10:19:56 crc kubenswrapper[4685]: I1202 10:19:56.954774 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhgb8\" (UniqueName: \"kubernetes.io/projected/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-kube-api-access-zhgb8\") pod \"cinder-db-create-qhx5w\" (UID: \"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf\") " pod="openstack/cinder-db-create-qhx5w" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.039110 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-af9e-account-create-update-7hd8q"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.040434 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-af9e-account-create-update-7hd8q" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.042479 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.049597 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-af9e-account-create-update-7hd8q"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.056476 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-operator-scripts\") pod \"cinder-af9e-account-create-update-7hd8q\" (UID: \"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d\") " pod="openstack/cinder-af9e-account-create-update-7hd8q" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.056532 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2mzk\" (UniqueName: \"kubernetes.io/projected/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-kube-api-access-q2mzk\") pod \"cinder-af9e-account-create-update-7hd8q\" (UID: \"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d\") " pod="openstack/cinder-af9e-account-create-update-7hd8q" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.056606 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-operator-scripts\") pod \"cinder-db-create-qhx5w\" (UID: \"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf\") " pod="openstack/cinder-db-create-qhx5w" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.056655 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhgb8\" (UniqueName: \"kubernetes.io/projected/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-kube-api-access-zhgb8\") pod \"cinder-db-create-qhx5w\" (UID: \"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf\") " pod="openstack/cinder-db-create-qhx5w" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.057760 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-operator-scripts\") pod \"cinder-db-create-qhx5w\" (UID: \"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf\") " pod="openstack/cinder-db-create-qhx5w" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.115568 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-jlm72"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.116587 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-jlm72" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.128261 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhgb8\" (UniqueName: \"kubernetes.io/projected/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-kube-api-access-zhgb8\") pod \"cinder-db-create-qhx5w\" (UID: \"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf\") " pod="openstack/cinder-db-create-qhx5w" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.129884 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-jlm72"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.158804 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxvv2\" (UniqueName: \"kubernetes.io/projected/30d4b64f-19c4-41d2-b11e-93186a45b66f-kube-api-access-pxvv2\") pod \"barbican-db-create-jlm72\" (UID: \"30d4b64f-19c4-41d2-b11e-93186a45b66f\") " pod="openstack/barbican-db-create-jlm72" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.158866 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30d4b64f-19c4-41d2-b11e-93186a45b66f-operator-scripts\") pod \"barbican-db-create-jlm72\" (UID: \"30d4b64f-19c4-41d2-b11e-93186a45b66f\") " pod="openstack/barbican-db-create-jlm72" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.158984 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-operator-scripts\") pod \"cinder-af9e-account-create-update-7hd8q\" (UID: \"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d\") " pod="openstack/cinder-af9e-account-create-update-7hd8q" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.159043 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2mzk\" (UniqueName: \"kubernetes.io/projected/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-kube-api-access-q2mzk\") pod \"cinder-af9e-account-create-update-7hd8q\" (UID: \"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d\") " pod="openstack/cinder-af9e-account-create-update-7hd8q" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.160222 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-operator-scripts\") pod \"cinder-af9e-account-create-update-7hd8q\" (UID: \"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d\") " pod="openstack/cinder-af9e-account-create-update-7hd8q" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.185697 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2mzk\" (UniqueName: \"kubernetes.io/projected/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-kube-api-access-q2mzk\") pod \"cinder-af9e-account-create-update-7hd8q\" (UID: \"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d\") " pod="openstack/cinder-af9e-account-create-update-7hd8q" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.228587 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-qhx5w" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.263057 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxvv2\" (UniqueName: \"kubernetes.io/projected/30d4b64f-19c4-41d2-b11e-93186a45b66f-kube-api-access-pxvv2\") pod \"barbican-db-create-jlm72\" (UID: \"30d4b64f-19c4-41d2-b11e-93186a45b66f\") " pod="openstack/barbican-db-create-jlm72" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.263155 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30d4b64f-19c4-41d2-b11e-93186a45b66f-operator-scripts\") pod \"barbican-db-create-jlm72\" (UID: \"30d4b64f-19c4-41d2-b11e-93186a45b66f\") " pod="openstack/barbican-db-create-jlm72" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.264067 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30d4b64f-19c4-41d2-b11e-93186a45b66f-operator-scripts\") pod \"barbican-db-create-jlm72\" (UID: \"30d4b64f-19c4-41d2-b11e-93186a45b66f\") " pod="openstack/barbican-db-create-jlm72" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.305952 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-t4xqf"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.311523 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-t4xqf" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.342766 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8615-account-create-update-tgbbj"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.344630 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8615-account-create-update-tgbbj" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.345843 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxvv2\" (UniqueName: \"kubernetes.io/projected/30d4b64f-19c4-41d2-b11e-93186a45b66f-kube-api-access-pxvv2\") pod \"barbican-db-create-jlm72\" (UID: \"30d4b64f-19c4-41d2-b11e-93186a45b66f\") " pod="openstack/barbican-db-create-jlm72" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.351410 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.364618 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf2qp\" (UniqueName: \"kubernetes.io/projected/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-kube-api-access-xf2qp\") pod \"neutron-db-create-t4xqf\" (UID: \"ed622126-1bc1-49ad-8c9a-be2bfbc94a67\") " pod="openstack/neutron-db-create-t4xqf" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.364675 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f45f478-3117-4a0f-98df-9e12cfd6e32c-operator-scripts\") pod \"barbican-8615-account-create-update-tgbbj\" (UID: \"1f45f478-3117-4a0f-98df-9e12cfd6e32c\") " pod="openstack/barbican-8615-account-create-update-tgbbj" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.364762 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-operator-scripts\") pod \"neutron-db-create-t4xqf\" (UID: \"ed622126-1bc1-49ad-8c9a-be2bfbc94a67\") " pod="openstack/neutron-db-create-t4xqf" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.364904 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vhmc\" (UniqueName: \"kubernetes.io/projected/1f45f478-3117-4a0f-98df-9e12cfd6e32c-kube-api-access-5vhmc\") pod \"barbican-8615-account-create-update-tgbbj\" (UID: \"1f45f478-3117-4a0f-98df-9e12cfd6e32c\") " pod="openstack/barbican-8615-account-create-update-tgbbj" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.365047 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-t4xqf"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.365864 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-af9e-account-create-update-7hd8q" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.391881 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8615-account-create-update-tgbbj"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.448964 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-cbz6h"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.456344 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jlm72" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.456958 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.466008 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-operator-scripts\") pod \"neutron-db-create-t4xqf\" (UID: \"ed622126-1bc1-49ad-8c9a-be2bfbc94a67\") " pod="openstack/neutron-db-create-t4xqf" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.466099 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vhmc\" (UniqueName: \"kubernetes.io/projected/1f45f478-3117-4a0f-98df-9e12cfd6e32c-kube-api-access-5vhmc\") pod \"barbican-8615-account-create-update-tgbbj\" (UID: \"1f45f478-3117-4a0f-98df-9e12cfd6e32c\") " pod="openstack/barbican-8615-account-create-update-tgbbj" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.466120 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf2qp\" (UniqueName: \"kubernetes.io/projected/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-kube-api-access-xf2qp\") pod \"neutron-db-create-t4xqf\" (UID: \"ed622126-1bc1-49ad-8c9a-be2bfbc94a67\") " pod="openstack/neutron-db-create-t4xqf" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.466143 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f45f478-3117-4a0f-98df-9e12cfd6e32c-operator-scripts\") pod \"barbican-8615-account-create-update-tgbbj\" (UID: \"1f45f478-3117-4a0f-98df-9e12cfd6e32c\") " pod="openstack/barbican-8615-account-create-update-tgbbj" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.468576 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f45f478-3117-4a0f-98df-9e12cfd6e32c-operator-scripts\") pod \"barbican-8615-account-create-update-tgbbj\" (UID: \"1f45f478-3117-4a0f-98df-9e12cfd6e32c\") " pod="openstack/barbican-8615-account-create-update-tgbbj" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.469160 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.469329 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t7kkp" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.469434 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.469735 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.478072 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-operator-scripts\") pod \"neutron-db-create-t4xqf\" (UID: \"ed622126-1bc1-49ad-8c9a-be2bfbc94a67\") " pod="openstack/neutron-db-create-t4xqf" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.519339 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf2qp\" (UniqueName: \"kubernetes.io/projected/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-kube-api-access-xf2qp\") pod \"neutron-db-create-t4xqf\" (UID: \"ed622126-1bc1-49ad-8c9a-be2bfbc94a67\") " pod="openstack/neutron-db-create-t4xqf" Dec 02 10:19:57 crc 
kubenswrapper[4685]: I1202 10:19:57.526838 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vhmc\" (UniqueName: \"kubernetes.io/projected/1f45f478-3117-4a0f-98df-9e12cfd6e32c-kube-api-access-5vhmc\") pod \"barbican-8615-account-create-update-tgbbj\" (UID: \"1f45f478-3117-4a0f-98df-9e12cfd6e32c\") " pod="openstack/barbican-8615-account-create-update-tgbbj" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.536030 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-cbz6h"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.567414 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-config-data\") pod \"keystone-db-sync-cbz6h\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.567493 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-combined-ca-bundle\") pod \"keystone-db-sync-cbz6h\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.567570 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7jhp\" (UniqueName: \"kubernetes.io/projected/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-kube-api-access-t7jhp\") pod \"keystone-db-sync-cbz6h\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.607130 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-1a78-account-create-update-vmvgt"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.614115 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-1a78-account-create-update-vmvgt" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.617304 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.630472 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-1a78-account-create-update-vmvgt"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.669419 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7jhp\" (UniqueName: \"kubernetes.io/projected/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-kube-api-access-t7jhp\") pod \"keystone-db-sync-cbz6h\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.669479 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlkjx\" (UniqueName: \"kubernetes.io/projected/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-kube-api-access-rlkjx\") pod \"neutron-1a78-account-create-update-vmvgt\" (UID: \"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf\") " pod="openstack/neutron-1a78-account-create-update-vmvgt" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.669543 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-config-data\") pod \"keystone-db-sync-cbz6h\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.669612 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-operator-scripts\") pod \"neutron-1a78-account-create-update-vmvgt\" (UID: \"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf\") " pod="openstack/neutron-1a78-account-create-update-vmvgt" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.669660 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-combined-ca-bundle\") pod \"keystone-db-sync-cbz6h\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.675264 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-config-data\") pod \"keystone-db-sync-cbz6h\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.683520 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-combined-ca-bundle\") pod \"keystone-db-sync-cbz6h\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.694669 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7jhp\" (UniqueName: \"kubernetes.io/projected/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-kube-api-access-t7jhp\") pod \"keystone-db-sync-cbz6h\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " 
pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.700953 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-t4xqf" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.722201 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8615-account-create-update-tgbbj" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.771822 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-operator-scripts\") pod \"neutron-1a78-account-create-update-vmvgt\" (UID: \"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf\") " pod="openstack/neutron-1a78-account-create-update-vmvgt" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.771962 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlkjx\" (UniqueName: \"kubernetes.io/projected/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-kube-api-access-rlkjx\") pod \"neutron-1a78-account-create-update-vmvgt\" (UID: \"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf\") " pod="openstack/neutron-1a78-account-create-update-vmvgt" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.772702 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-operator-scripts\") pod \"neutron-1a78-account-create-update-vmvgt\" (UID: \"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf\") " pod="openstack/neutron-1a78-account-create-update-vmvgt" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.803733 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlkjx\" (UniqueName: \"kubernetes.io/projected/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-kube-api-access-rlkjx\") pod \"neutron-1a78-account-create-update-vmvgt\" (UID: \"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf\") " pod="openstack/neutron-1a78-account-create-update-vmvgt" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.831359 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.893120 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-af9e-account-create-update-7hd8q"] Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.959541 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1a78-account-create-update-vmvgt" Dec 02 10:19:57 crc kubenswrapper[4685]: I1202 10:19:57.998041 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-qhx5w"] Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.064764 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.098081 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-db-sync-config-data\") pod \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.111669 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96v8n\" (UniqueName: \"kubernetes.io/projected/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-kube-api-access-96v8n\") pod \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.111986 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-config-data\") pod \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.112030 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-combined-ca-bundle\") pod \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\" (UID: \"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c\") " Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.121158 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c" (UID: "5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.131760 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-kube-api-access-96v8n" (OuterVolumeSpecName: "kube-api-access-96v8n") pod "5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c" (UID: "5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c"). InnerVolumeSpecName "kube-api-access-96v8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.210817 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-jlm72"] Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.215854 4685 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.215897 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96v8n\" (UniqueName: \"kubernetes.io/projected/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-kube-api-access-96v8n\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.243299 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c" (UID: "5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.247093 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-config-data" (OuterVolumeSpecName: "config-data") pod "5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c" (UID: "5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.321464 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.321501 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.333408 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jlm72" event={"ID":"30d4b64f-19c4-41d2-b11e-93186a45b66f","Type":"ContainerStarted","Data":"e4317ce6a261e9c121aa34b9178b896b73adb9c153909505fd30230c107911f8"} Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.336291 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-af9e-account-create-update-7hd8q" event={"ID":"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d","Type":"ContainerStarted","Data":"27bb8b4a6ead383dcd1ac56a47a6dba1bdca259d51853792327147087ddf0616"} Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.338099 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qhx5w" event={"ID":"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf","Type":"ContainerStarted","Data":"cc9e8c79456fe48c22f68e35496daed597a1f246a1fc0be48568ae3f52dcb06f"} Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.349219 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-lwcqt" event={"ID":"5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c","Type":"ContainerDied","Data":"6531fdf3f4485740b662478489afc9de6faced568ec61e05ee15f4451e38bc76"} Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.349265 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6531fdf3f4485740b662478489afc9de6faced568ec61e05ee15f4451e38bc76" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.349329 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-lwcqt" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.528061 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-t4xqf"] Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.541854 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8615-account-create-update-tgbbj"] Dec 02 10:19:58 crc kubenswrapper[4685]: W1202 10:19:58.551473 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f45f478_3117_4a0f_98df_9e12cfd6e32c.slice/crio-22227e5e608901f2a7ce6605a3347b895ef3fc2e29dbd8208831a5ff4cc15b68 WatchSource:0}: Error finding container 22227e5e608901f2a7ce6605a3347b895ef3fc2e29dbd8208831a5ff4cc15b68: Status 404 returned error can't find the container with id 22227e5e608901f2a7ce6605a3347b895ef3fc2e29dbd8208831a5ff4cc15b68 Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.650374 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-cbz6h"] Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.815773 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-1a78-account-create-update-vmvgt"] Dec 02 10:19:58 crc kubenswrapper[4685]: W1202 10:19:58.819847 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabf10e29_603f_4ca8_bf1d_9d7dcd0321cf.slice/crio-060518f0a8150e122aa8d1b7ab5fd7476e3ba541085ac642e68a66b9b10cc1cf WatchSource:0}: Error finding container 060518f0a8150e122aa8d1b7ab5fd7476e3ba541085ac642e68a66b9b10cc1cf: Status 404 returned error can't find the container with id 060518f0a8150e122aa8d1b7ab5fd7476e3ba541085ac642e68a66b9b10cc1cf Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.853310 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-j99xz"] Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.853684 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" podUID="393f5a5c-ca19-44e2-968c-b66c08df4684" containerName="dnsmasq-dns" containerID="cri-o://e7ab0fb59822dc88c123ae641e6c2811473be3522ec0f64b97dcdedc37678b82" gracePeriod=10 Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.902134 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-z58w5"] Dec 02 10:19:58 crc kubenswrapper[4685]: E1202 10:19:58.902525 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c" containerName="glance-db-sync" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.902541 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c" containerName="glance-db-sync" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.902749 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c" containerName="glance-db-sync" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.917986 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:58 crc kubenswrapper[4685]: I1202 10:19:58.959489 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-z58w5"] Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.037754 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.037806 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.037831 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w59h9\" (UniqueName: \"kubernetes.io/projected/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-kube-api-access-w59h9\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.037855 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.037935 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.037963 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-config\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.141181 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w59h9\" (UniqueName: \"kubernetes.io/projected/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-kube-api-access-w59h9\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.141624 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.141768 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.141846 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-config\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.141947 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.142037 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.143437 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.144229 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.144905 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-config\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.145406 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.147001 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.189293 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w59h9\" (UniqueName: 
\"kubernetes.io/projected/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-kube-api-access-w59h9\") pod \"dnsmasq-dns-74f6bcbc87-z58w5\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.245976 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.460869 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jlm72" event={"ID":"30d4b64f-19c4-41d2-b11e-93186a45b66f","Type":"ContainerStarted","Data":"7f1f09b373104069d4a41258d0a09da721cf878a2c264341201509a70b9cfa52"} Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.479673 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1a78-account-create-update-vmvgt" event={"ID":"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf","Type":"ContainerStarted","Data":"060518f0a8150e122aa8d1b7ab5fd7476e3ba541085ac642e68a66b9b10cc1cf"} Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.526653 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-af9e-account-create-update-7hd8q" event={"ID":"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d","Type":"ContainerStarted","Data":"6fa2be5ff627ca8d8a4e37fdbd3590f326ed189f0a88e77d60133375c7163bef"} Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.547403 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qhx5w" event={"ID":"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf","Type":"ContainerStarted","Data":"31663d0406cddf22bf5aa77408adfccbf8e47aa626ed3c51e1c8a71601f1432b"} Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.604188 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-af9e-account-create-update-7hd8q" podStartSLOduration=2.6041438919999997 podStartE2EDuration="2.604143892s" podCreationTimestamp="2025-12-02 10:19:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:59.600723339 +0000 UTC m=+1091.972497493" watchObservedRunningTime="2025-12-02 10:19:59.604143892 +0000 UTC m=+1091.975918036" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.623366 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-jlm72" podStartSLOduration=2.62334653 podStartE2EDuration="2.62334653s" podCreationTimestamp="2025-12-02 10:19:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:59.494752568 +0000 UTC m=+1091.866526722" watchObservedRunningTime="2025-12-02 10:19:59.62334653 +0000 UTC m=+1091.995120684" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.627801 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-qhx5w" podStartSLOduration=3.627779701 podStartE2EDuration="3.627779701s" podCreationTimestamp="2025-12-02 10:19:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:59.624915802 +0000 UTC m=+1091.996689956" watchObservedRunningTime="2025-12-02 10:19:59.627779701 +0000 UTC m=+1091.999553855" Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.632039 4685 generic.go:334] "Generic (PLEG): container finished" 
podID="393f5a5c-ca19-44e2-968c-b66c08df4684" containerID="e7ab0fb59822dc88c123ae641e6c2811473be3522ec0f64b97dcdedc37678b82" exitCode=0 Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.632116 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" event={"ID":"393f5a5c-ca19-44e2-968c-b66c08df4684","Type":"ContainerDied","Data":"e7ab0fb59822dc88c123ae641e6c2811473be3522ec0f64b97dcdedc37678b82"} Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.648965 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-t4xqf" event={"ID":"ed622126-1bc1-49ad-8c9a-be2bfbc94a67","Type":"ContainerStarted","Data":"8f7686b6cd630eb3d4b23ca0fcadf88d3742aa5132318a27ddc05d1da0905e03"} Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.649005 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-t4xqf" event={"ID":"ed622126-1bc1-49ad-8c9a-be2bfbc94a67","Type":"ContainerStarted","Data":"fe6d5af2d6974b55f6b9b144058292b487f660c00a7e66a2f06f1d59d607fd5c"} Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.656806 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cbz6h" event={"ID":"8c794e1d-41e7-43e2-bd5c-bed20f3ea587","Type":"ContainerStarted","Data":"6665354b07fb686ce9a1a6d7d61216112a35cdeeb1404531482d0681df4bbb27"} Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.660598 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8615-account-create-update-tgbbj" event={"ID":"1f45f478-3117-4a0f-98df-9e12cfd6e32c","Type":"ContainerStarted","Data":"22227e5e608901f2a7ce6605a3347b895ef3fc2e29dbd8208831a5ff4cc15b68"} Dec 02 10:19:59 crc kubenswrapper[4685]: I1202 10:19:59.668736 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-t4xqf" podStartSLOduration=2.668718936 podStartE2EDuration="2.668718936s" podCreationTimestamp="2025-12-02 10:19:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:19:59.662267689 +0000 UTC m=+1092.034041843" watchObservedRunningTime="2025-12-02 10:19:59.668718936 +0000 UTC m=+1092.040493090" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.096110 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.215885 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-nb\") pod \"393f5a5c-ca19-44e2-968c-b66c08df4684\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.215956 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-sb\") pod \"393f5a5c-ca19-44e2-968c-b66c08df4684\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.216051 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-config\") pod \"393f5a5c-ca19-44e2-968c-b66c08df4684\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.216087 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-swift-storage-0\") pod \"393f5a5c-ca19-44e2-968c-b66c08df4684\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.216132 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4s2p4\" (UniqueName: \"kubernetes.io/projected/393f5a5c-ca19-44e2-968c-b66c08df4684-kube-api-access-4s2p4\") pod \"393f5a5c-ca19-44e2-968c-b66c08df4684\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.216224 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-svc\") pod \"393f5a5c-ca19-44e2-968c-b66c08df4684\" (UID: \"393f5a5c-ca19-44e2-968c-b66c08df4684\") " Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.222910 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/393f5a5c-ca19-44e2-968c-b66c08df4684-kube-api-access-4s2p4" (OuterVolumeSpecName: "kube-api-access-4s2p4") pod "393f5a5c-ca19-44e2-968c-b66c08df4684" (UID: "393f5a5c-ca19-44e2-968c-b66c08df4684"). InnerVolumeSpecName "kube-api-access-4s2p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.292765 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "393f5a5c-ca19-44e2-968c-b66c08df4684" (UID: "393f5a5c-ca19-44e2-968c-b66c08df4684"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.318957 4685 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.319398 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4s2p4\" (UniqueName: \"kubernetes.io/projected/393f5a5c-ca19-44e2-968c-b66c08df4684-kube-api-access-4s2p4\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.329126 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "393f5a5c-ca19-44e2-968c-b66c08df4684" (UID: "393f5a5c-ca19-44e2-968c-b66c08df4684"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.331502 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-z58w5"] Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.337079 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "393f5a5c-ca19-44e2-968c-b66c08df4684" (UID: "393f5a5c-ca19-44e2-968c-b66c08df4684"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.341023 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "393f5a5c-ca19-44e2-968c-b66c08df4684" (UID: "393f5a5c-ca19-44e2-968c-b66c08df4684"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:00 crc kubenswrapper[4685]: W1202 10:20:00.349923 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e80924c_dc2a_4ada_897f_eaf391c8b7d6.slice/crio-91fa370fadd71169d7efb78aa1e7f92422d4305f12a86a746750f66726d34b6c WatchSource:0}: Error finding container 91fa370fadd71169d7efb78aa1e7f92422d4305f12a86a746750f66726d34b6c: Status 404 returned error can't find the container with id 91fa370fadd71169d7efb78aa1e7f92422d4305f12a86a746750f66726d34b6c Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.354456 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-config" (OuterVolumeSpecName: "config") pod "393f5a5c-ca19-44e2-968c-b66c08df4684" (UID: "393f5a5c-ca19-44e2-968c-b66c08df4684"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.421160 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.421381 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.421441 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.421543 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/393f5a5c-ca19-44e2-968c-b66c08df4684-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.672866 4685 generic.go:334] "Generic (PLEG): container finished" podID="dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf" containerID="31663d0406cddf22bf5aa77408adfccbf8e47aa626ed3c51e1c8a71601f1432b" exitCode=0 Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.672967 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qhx5w" event={"ID":"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf","Type":"ContainerDied","Data":"31663d0406cddf22bf5aa77408adfccbf8e47aa626ed3c51e1c8a71601f1432b"} Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.676228 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" event={"ID":"393f5a5c-ca19-44e2-968c-b66c08df4684","Type":"ContainerDied","Data":"ba812f4397e1b221ab190c9c082e0dbc8f7aa5e27783820abb66249b4477d420"} Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.676373 4685 scope.go:117] "RemoveContainer" containerID="e7ab0fb59822dc88c123ae641e6c2811473be3522ec0f64b97dcdedc37678b82" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.676244 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-j99xz" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.678466 4685 generic.go:334] "Generic (PLEG): container finished" podID="ed622126-1bc1-49ad-8c9a-be2bfbc94a67" containerID="8f7686b6cd630eb3d4b23ca0fcadf88d3742aa5132318a27ddc05d1da0905e03" exitCode=0 Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.678510 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-t4xqf" event={"ID":"ed622126-1bc1-49ad-8c9a-be2bfbc94a67","Type":"ContainerDied","Data":"8f7686b6cd630eb3d4b23ca0fcadf88d3742aa5132318a27ddc05d1da0905e03"} Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.679669 4685 generic.go:334] "Generic (PLEG): container finished" podID="1f45f478-3117-4a0f-98df-9e12cfd6e32c" containerID="9dda76e504be83fb2bc66894f07a81609bf305bc96fd23526198ca268ed174bb" exitCode=0 Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.679704 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8615-account-create-update-tgbbj" event={"ID":"1f45f478-3117-4a0f-98df-9e12cfd6e32c","Type":"ContainerDied","Data":"9dda76e504be83fb2bc66894f07a81609bf305bc96fd23526198ca268ed174bb"} Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.680702 4685 generic.go:334] "Generic (PLEG): container finished" podID="30d4b64f-19c4-41d2-b11e-93186a45b66f" containerID="7f1f09b373104069d4a41258d0a09da721cf878a2c264341201509a70b9cfa52" exitCode=0 Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.680737 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jlm72" event={"ID":"30d4b64f-19c4-41d2-b11e-93186a45b66f","Type":"ContainerDied","Data":"7f1f09b373104069d4a41258d0a09da721cf878a2c264341201509a70b9cfa52"} Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.683023 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" event={"ID":"5e80924c-dc2a-4ada-897f-eaf391c8b7d6","Type":"ContainerStarted","Data":"91fa370fadd71169d7efb78aa1e7f92422d4305f12a86a746750f66726d34b6c"} Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.684816 4685 generic.go:334] "Generic (PLEG): container finished" podID="abf10e29-603f-4ca8-bf1d-9d7dcd0321cf" containerID="0052ff73f2be1b7fdd02ba5e98e16655fac6b9efb1f1d635e8e331114ba4666b" exitCode=0 Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.684856 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1a78-account-create-update-vmvgt" event={"ID":"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf","Type":"ContainerDied","Data":"0052ff73f2be1b7fdd02ba5e98e16655fac6b9efb1f1d635e8e331114ba4666b"} Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.686196 4685 generic.go:334] "Generic (PLEG): container finished" podID="79bdcad7-d10f-44cd-ad3d-74f2ef5e361d" containerID="6fa2be5ff627ca8d8a4e37fdbd3590f326ed189f0a88e77d60133375c7163bef" exitCode=0 Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.686239 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-af9e-account-create-update-7hd8q" event={"ID":"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d","Type":"ContainerDied","Data":"6fa2be5ff627ca8d8a4e37fdbd3590f326ed189f0a88e77d60133375c7163bef"} Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.716369 4685 scope.go:117] "RemoveContainer" containerID="44a5fa4e31b8bb43b0072ed87079dd84f974cc09230f1a79dccf8cd802d3bf79" Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.794229 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-764c5664d7-j99xz"] Dec 02 10:20:00 crc kubenswrapper[4685]: I1202 10:20:00.801194 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-j99xz"] Dec 02 10:20:01 crc kubenswrapper[4685]: I1202 10:20:01.695173 4685 generic.go:334] "Generic (PLEG): container finished" podID="5e80924c-dc2a-4ada-897f-eaf391c8b7d6" containerID="c5ab0619537af61e796f0ad9162aa0cc413cbc137e7b3dbd7f0e60ca777de502" exitCode=0 Dec 02 10:20:01 crc kubenswrapper[4685]: I1202 10:20:01.695345 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" event={"ID":"5e80924c-dc2a-4ada-897f-eaf391c8b7d6","Type":"ContainerDied","Data":"c5ab0619537af61e796f0ad9162aa0cc413cbc137e7b3dbd7f0e60ca777de502"} Dec 02 10:20:01 crc kubenswrapper[4685]: I1202 10:20:01.936269 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="393f5a5c-ca19-44e2-968c-b66c08df4684" path="/var/lib/kubelet/pods/393f5a5c-ca19-44e2-968c-b66c08df4684/volumes" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.282116 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jlm72" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.320365 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1a78-account-create-update-vmvgt" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.341816 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-af9e-account-create-update-7hd8q" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.348827 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8615-account-create-update-tgbbj" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.365957 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-t4xqf" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.386210 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-qhx5w" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.425269 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlkjx\" (UniqueName: \"kubernetes.io/projected/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-kube-api-access-rlkjx\") pod \"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf\" (UID: \"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.425344 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30d4b64f-19c4-41d2-b11e-93186a45b66f-operator-scripts\") pod \"30d4b64f-19c4-41d2-b11e-93186a45b66f\" (UID: \"30d4b64f-19c4-41d2-b11e-93186a45b66f\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.425473 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxvv2\" (UniqueName: \"kubernetes.io/projected/30d4b64f-19c4-41d2-b11e-93186a45b66f-kube-api-access-pxvv2\") pod \"30d4b64f-19c4-41d2-b11e-93186a45b66f\" (UID: \"30d4b64f-19c4-41d2-b11e-93186a45b66f\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.425500 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f45f478-3117-4a0f-98df-9e12cfd6e32c-operator-scripts\") pod \"1f45f478-3117-4a0f-98df-9e12cfd6e32c\" (UID: \"1f45f478-3117-4a0f-98df-9e12cfd6e32c\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.425553 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-operator-scripts\") pod \"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d\" (UID: \"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.425630 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2mzk\" (UniqueName: \"kubernetes.io/projected/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-kube-api-access-q2mzk\") pod \"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d\" (UID: \"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.425661 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vhmc\" (UniqueName: \"kubernetes.io/projected/1f45f478-3117-4a0f-98df-9e12cfd6e32c-kube-api-access-5vhmc\") pod \"1f45f478-3117-4a0f-98df-9e12cfd6e32c\" (UID: \"1f45f478-3117-4a0f-98df-9e12cfd6e32c\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.425686 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-operator-scripts\") pod \"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf\" (UID: \"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.427059 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f45f478-3117-4a0f-98df-9e12cfd6e32c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1f45f478-3117-4a0f-98df-9e12cfd6e32c" (UID: "1f45f478-3117-4a0f-98df-9e12cfd6e32c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.427404 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "abf10e29-603f-4ca8-bf1d-9d7dcd0321cf" (UID: "abf10e29-603f-4ca8-bf1d-9d7dcd0321cf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.428175 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30d4b64f-19c4-41d2-b11e-93186a45b66f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "30d4b64f-19c4-41d2-b11e-93186a45b66f" (UID: "30d4b64f-19c4-41d2-b11e-93186a45b66f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.428187 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "79bdcad7-d10f-44cd-ad3d-74f2ef5e361d" (UID: "79bdcad7-d10f-44cd-ad3d-74f2ef5e361d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.431716 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f45f478-3117-4a0f-98df-9e12cfd6e32c-kube-api-access-5vhmc" (OuterVolumeSpecName: "kube-api-access-5vhmc") pod "1f45f478-3117-4a0f-98df-9e12cfd6e32c" (UID: "1f45f478-3117-4a0f-98df-9e12cfd6e32c"). InnerVolumeSpecName "kube-api-access-5vhmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.432323 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30d4b64f-19c4-41d2-b11e-93186a45b66f-kube-api-access-pxvv2" (OuterVolumeSpecName: "kube-api-access-pxvv2") pod "30d4b64f-19c4-41d2-b11e-93186a45b66f" (UID: "30d4b64f-19c4-41d2-b11e-93186a45b66f"). InnerVolumeSpecName "kube-api-access-pxvv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.432839 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-kube-api-access-rlkjx" (OuterVolumeSpecName: "kube-api-access-rlkjx") pod "abf10e29-603f-4ca8-bf1d-9d7dcd0321cf" (UID: "abf10e29-603f-4ca8-bf1d-9d7dcd0321cf"). InnerVolumeSpecName "kube-api-access-rlkjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.434707 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-kube-api-access-q2mzk" (OuterVolumeSpecName: "kube-api-access-q2mzk") pod "79bdcad7-d10f-44cd-ad3d-74f2ef5e361d" (UID: "79bdcad7-d10f-44cd-ad3d-74f2ef5e361d"). InnerVolumeSpecName "kube-api-access-q2mzk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527349 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhgb8\" (UniqueName: \"kubernetes.io/projected/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-kube-api-access-zhgb8\") pod \"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf\" (UID: \"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527408 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-operator-scripts\") pod \"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf\" (UID: \"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527482 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-operator-scripts\") pod \"ed622126-1bc1-49ad-8c9a-be2bfbc94a67\" (UID: \"ed622126-1bc1-49ad-8c9a-be2bfbc94a67\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527499 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf2qp\" (UniqueName: \"kubernetes.io/projected/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-kube-api-access-xf2qp\") pod \"ed622126-1bc1-49ad-8c9a-be2bfbc94a67\" (UID: \"ed622126-1bc1-49ad-8c9a-be2bfbc94a67\") " Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527858 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30d4b64f-19c4-41d2-b11e-93186a45b66f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527870 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxvv2\" (UniqueName: \"kubernetes.io/projected/30d4b64f-19c4-41d2-b11e-93186a45b66f-kube-api-access-pxvv2\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527882 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f45f478-3117-4a0f-98df-9e12cfd6e32c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527890 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527898 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2mzk\" (UniqueName: \"kubernetes.io/projected/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d-kube-api-access-q2mzk\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527922 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vhmc\" (UniqueName: \"kubernetes.io/projected/1f45f478-3117-4a0f-98df-9e12cfd6e32c-kube-api-access-5vhmc\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527933 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.527944 4685 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-rlkjx\" (UniqueName: \"kubernetes.io/projected/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf-kube-api-access-rlkjx\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.528693 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ed622126-1bc1-49ad-8c9a-be2bfbc94a67" (UID: "ed622126-1bc1-49ad-8c9a-be2bfbc94a67"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.529227 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf" (UID: "dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.530550 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-kube-api-access-xf2qp" (OuterVolumeSpecName: "kube-api-access-xf2qp") pod "ed622126-1bc1-49ad-8c9a-be2bfbc94a67" (UID: "ed622126-1bc1-49ad-8c9a-be2bfbc94a67"). InnerVolumeSpecName "kube-api-access-xf2qp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.531118 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-kube-api-access-zhgb8" (OuterVolumeSpecName: "kube-api-access-zhgb8") pod "dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf" (UID: "dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf"). InnerVolumeSpecName "kube-api-access-zhgb8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.628812 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhgb8\" (UniqueName: \"kubernetes.io/projected/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-kube-api-access-zhgb8\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.628844 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.628855 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.628864 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf2qp\" (UniqueName: \"kubernetes.io/projected/ed622126-1bc1-49ad-8c9a-be2bfbc94a67-kube-api-access-xf2qp\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.736998 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8615-account-create-update-tgbbj" event={"ID":"1f45f478-3117-4a0f-98df-9e12cfd6e32c","Type":"ContainerDied","Data":"22227e5e608901f2a7ce6605a3347b895ef3fc2e29dbd8208831a5ff4cc15b68"} Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.737039 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22227e5e608901f2a7ce6605a3347b895ef3fc2e29dbd8208831a5ff4cc15b68" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.737085 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8615-account-create-update-tgbbj" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.745960 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-jlm72" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.746407 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jlm72" event={"ID":"30d4b64f-19c4-41d2-b11e-93186a45b66f","Type":"ContainerDied","Data":"e4317ce6a261e9c121aa34b9178b896b73adb9c153909505fd30230c107911f8"} Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.746589 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4317ce6a261e9c121aa34b9178b896b73adb9c153909505fd30230c107911f8" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.749490 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" event={"ID":"5e80924c-dc2a-4ada-897f-eaf391c8b7d6","Type":"ContainerStarted","Data":"8eed82684ee872fd6fffa21c38055cccaedda70b8226144ee84758494cc6a9af"} Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.749727 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.751940 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1a78-account-create-update-vmvgt" event={"ID":"abf10e29-603f-4ca8-bf1d-9d7dcd0321cf","Type":"ContainerDied","Data":"060518f0a8150e122aa8d1b7ab5fd7476e3ba541085ac642e68a66b9b10cc1cf"} Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.752056 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="060518f0a8150e122aa8d1b7ab5fd7476e3ba541085ac642e68a66b9b10cc1cf" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.751954 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1a78-account-create-update-vmvgt" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.753218 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-af9e-account-create-update-7hd8q" event={"ID":"79bdcad7-d10f-44cd-ad3d-74f2ef5e361d","Type":"ContainerDied","Data":"27bb8b4a6ead383dcd1ac56a47a6dba1bdca259d51853792327147087ddf0616"} Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.753258 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27bb8b4a6ead383dcd1ac56a47a6dba1bdca259d51853792327147087ddf0616" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.753309 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-af9e-account-create-update-7hd8q" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.765643 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qhx5w" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.771359 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qhx5w" event={"ID":"dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf","Type":"ContainerDied","Data":"cc9e8c79456fe48c22f68e35496daed597a1f246a1fc0be48568ae3f52dcb06f"} Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.771427 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc9e8c79456fe48c22f68e35496daed597a1f246a1fc0be48568ae3f52dcb06f" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.773668 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-t4xqf" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.773842 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-t4xqf" event={"ID":"ed622126-1bc1-49ad-8c9a-be2bfbc94a67","Type":"ContainerDied","Data":"fe6d5af2d6974b55f6b9b144058292b487f660c00a7e66a2f06f1d59d607fd5c"} Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.774012 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe6d5af2d6974b55f6b9b144058292b487f660c00a7e66a2f06f1d59d607fd5c" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.778878 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cbz6h" event={"ID":"8c794e1d-41e7-43e2-bd5c-bed20f3ea587","Type":"ContainerStarted","Data":"aafb76207a76a15e7374329db4aad5f9db70949a8544585f98dc1126f4876109"} Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.783213 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" podStartSLOduration=7.783194273 podStartE2EDuration="7.783194273s" podCreationTimestamp="2025-12-02 10:19:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:05.780965052 +0000 UTC m=+1098.152739216" watchObservedRunningTime="2025-12-02 10:20:05.783194273 +0000 UTC m=+1098.154968427" Dec 02 10:20:05 crc kubenswrapper[4685]: I1202 10:20:05.810886 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-cbz6h" podStartSLOduration=2.382223978 podStartE2EDuration="8.810868922s" podCreationTimestamp="2025-12-02 10:19:57 +0000 UTC" firstStartedPulling="2025-12-02 10:19:58.684826508 +0000 UTC m=+1091.056600662" lastFinishedPulling="2025-12-02 10:20:05.113471452 +0000 UTC m=+1097.485245606" observedRunningTime="2025-12-02 10:20:05.806926624 +0000 UTC m=+1098.178700778" watchObservedRunningTime="2025-12-02 10:20:05.810868922 +0000 UTC m=+1098.182643086" Dec 02 10:20:08 crc kubenswrapper[4685]: I1202 10:20:08.804442 4685 generic.go:334] "Generic (PLEG): container finished" podID="8c794e1d-41e7-43e2-bd5c-bed20f3ea587" containerID="aafb76207a76a15e7374329db4aad5f9db70949a8544585f98dc1126f4876109" exitCode=0 Dec 02 10:20:08 crc kubenswrapper[4685]: I1202 10:20:08.804514 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cbz6h" event={"ID":"8c794e1d-41e7-43e2-bd5c-bed20f3ea587","Type":"ContainerDied","Data":"aafb76207a76a15e7374329db4aad5f9db70949a8544585f98dc1126f4876109"} Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.135990 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.306155 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7jhp\" (UniqueName: \"kubernetes.io/projected/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-kube-api-access-t7jhp\") pod \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.306263 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-combined-ca-bundle\") pod \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.306333 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-config-data\") pod \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\" (UID: \"8c794e1d-41e7-43e2-bd5c-bed20f3ea587\") " Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.313799 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-kube-api-access-t7jhp" (OuterVolumeSpecName: "kube-api-access-t7jhp") pod "8c794e1d-41e7-43e2-bd5c-bed20f3ea587" (UID: "8c794e1d-41e7-43e2-bd5c-bed20f3ea587"). InnerVolumeSpecName "kube-api-access-t7jhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.332310 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c794e1d-41e7-43e2-bd5c-bed20f3ea587" (UID: "8c794e1d-41e7-43e2-bd5c-bed20f3ea587"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.354257 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-config-data" (OuterVolumeSpecName: "config-data") pod "8c794e1d-41e7-43e2-bd5c-bed20f3ea587" (UID: "8c794e1d-41e7-43e2-bd5c-bed20f3ea587"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.408456 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7jhp\" (UniqueName: \"kubernetes.io/projected/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-kube-api-access-t7jhp\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.408492 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.408507 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c794e1d-41e7-43e2-bd5c-bed20f3ea587-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.821551 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-cbz6h" event={"ID":"8c794e1d-41e7-43e2-bd5c-bed20f3ea587","Type":"ContainerDied","Data":"6665354b07fb686ce9a1a6d7d61216112a35cdeeb1404531482d0681df4bbb27"} Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.821639 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6665354b07fb686ce9a1a6d7d61216112a35cdeeb1404531482d0681df4bbb27" Dec 02 10:20:10 crc kubenswrapper[4685]: I1202 10:20:10.821656 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-cbz6h" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127044 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-w2zzg"] Dec 02 10:20:11 crc kubenswrapper[4685]: E1202 10:20:11.127462 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="393f5a5c-ca19-44e2-968c-b66c08df4684" containerName="init" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127479 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="393f5a5c-ca19-44e2-968c-b66c08df4684" containerName="init" Dec 02 10:20:11 crc kubenswrapper[4685]: E1202 10:20:11.127491 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c794e1d-41e7-43e2-bd5c-bed20f3ea587" containerName="keystone-db-sync" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127499 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c794e1d-41e7-43e2-bd5c-bed20f3ea587" containerName="keystone-db-sync" Dec 02 10:20:11 crc kubenswrapper[4685]: E1202 10:20:11.127518 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abf10e29-603f-4ca8-bf1d-9d7dcd0321cf" containerName="mariadb-account-create-update" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127525 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="abf10e29-603f-4ca8-bf1d-9d7dcd0321cf" containerName="mariadb-account-create-update" Dec 02 10:20:11 crc kubenswrapper[4685]: E1202 10:20:11.127542 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed622126-1bc1-49ad-8c9a-be2bfbc94a67" containerName="mariadb-database-create" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127550 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed622126-1bc1-49ad-8c9a-be2bfbc94a67" containerName="mariadb-database-create" Dec 02 10:20:11 crc kubenswrapper[4685]: E1202 10:20:11.127581 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf" 
containerName="mariadb-database-create" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127590 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf" containerName="mariadb-database-create" Dec 02 10:20:11 crc kubenswrapper[4685]: E1202 10:20:11.127603 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f45f478-3117-4a0f-98df-9e12cfd6e32c" containerName="mariadb-account-create-update" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127610 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f45f478-3117-4a0f-98df-9e12cfd6e32c" containerName="mariadb-account-create-update" Dec 02 10:20:11 crc kubenswrapper[4685]: E1202 10:20:11.127635 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="393f5a5c-ca19-44e2-968c-b66c08df4684" containerName="dnsmasq-dns" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127643 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="393f5a5c-ca19-44e2-968c-b66c08df4684" containerName="dnsmasq-dns" Dec 02 10:20:11 crc kubenswrapper[4685]: E1202 10:20:11.127654 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79bdcad7-d10f-44cd-ad3d-74f2ef5e361d" containerName="mariadb-account-create-update" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127661 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="79bdcad7-d10f-44cd-ad3d-74f2ef5e361d" containerName="mariadb-account-create-update" Dec 02 10:20:11 crc kubenswrapper[4685]: E1202 10:20:11.127676 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30d4b64f-19c4-41d2-b11e-93186a45b66f" containerName="mariadb-database-create" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127686 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="30d4b64f-19c4-41d2-b11e-93186a45b66f" containerName="mariadb-database-create" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127879 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c794e1d-41e7-43e2-bd5c-bed20f3ea587" containerName="keystone-db-sync" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127898 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf" containerName="mariadb-database-create" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127908 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f45f478-3117-4a0f-98df-9e12cfd6e32c" containerName="mariadb-account-create-update" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127918 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed622126-1bc1-49ad-8c9a-be2bfbc94a67" containerName="mariadb-database-create" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127930 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="79bdcad7-d10f-44cd-ad3d-74f2ef5e361d" containerName="mariadb-account-create-update" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127941 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="abf10e29-603f-4ca8-bf1d-9d7dcd0321cf" containerName="mariadb-account-create-update" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127962 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="30d4b64f-19c4-41d2-b11e-93186a45b66f" containerName="mariadb-database-create" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.127971 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="393f5a5c-ca19-44e2-968c-b66c08df4684" 
containerName="dnsmasq-dns" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.128702 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.133267 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.133296 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.133760 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.133969 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t7kkp" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.134103 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.153996 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-z58w5"] Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.155944 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" podUID="5e80924c-dc2a-4ada-897f-eaf391c8b7d6" containerName="dnsmasq-dns" containerID="cri-o://8eed82684ee872fd6fffa21c38055cccaedda70b8226144ee84758494cc6a9af" gracePeriod=10 Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.160145 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.179163 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-w2zzg"] Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.221291 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-combined-ca-bundle\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.221344 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-scripts\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.221387 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-credential-keys\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.221421 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-config-data\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.221439 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x77b9\" (UniqueName: \"kubernetes.io/projected/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-kube-api-access-x77b9\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.221503 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-fernet-keys\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.292906 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-qbpr2"] Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.294452 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.317370 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-qbpr2"] Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.328007 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-fernet-keys\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.328108 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-combined-ca-bundle\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.328150 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-scripts\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.328185 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-credential-keys\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.328219 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-config-data\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.328235 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x77b9\" (UniqueName: \"kubernetes.io/projected/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-kube-api-access-x77b9\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc 
kubenswrapper[4685]: I1202 10:20:11.333329 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-fernet-keys\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.339915 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-scripts\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.342196 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-credential-keys\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.347361 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-config-data\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.370135 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-combined-ca-bundle\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.377473 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x77b9\" (UniqueName: \"kubernetes.io/projected/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-kube-api-access-x77b9\") pod \"keystone-bootstrap-w2zzg\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.429784 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.429825 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.429853 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg2sp\" (UniqueName: \"kubernetes.io/projected/6e73b95c-e958-4cec-ac51-5548736d68e3-kube-api-access-fg2sp\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.429877 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-config\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.430046 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.430096 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-svc\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.481343 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.531828 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.531880 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.531922 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg2sp\" (UniqueName: \"kubernetes.io/projected/6e73b95c-e958-4cec-ac51-5548736d68e3-kube-api-access-fg2sp\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.531952 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-config\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.532013 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.532045 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-svc\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: 
\"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.533207 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-svc\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.533848 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.534387 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-config\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.534588 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.534847 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.557435 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg2sp\" (UniqueName: \"kubernetes.io/projected/6e73b95c-e958-4cec-ac51-5548736d68e3-kube-api-access-fg2sp\") pod \"dnsmasq-dns-847c4cc679-qbpr2\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.730041 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.778529 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-f47mj"] Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.788129 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.810232 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-xx8rz"] Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.811189 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.823102 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.823410 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.823654 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-p568z" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.823824 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.824118 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zsjdb" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.824296 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.854642 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-mfhwg"] Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.855843 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.864689 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-xx8rz"] Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.865754 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-s6w2j" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.877090 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.924875 4685 generic.go:334] "Generic (PLEG): container finished" podID="5e80924c-dc2a-4ada-897f-eaf391c8b7d6" containerID="8eed82684ee872fd6fffa21c38055cccaedda70b8226144ee84758494cc6a9af" exitCode=0 Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.939986 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-config-data\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940025 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-config\") pod \"neutron-db-sync-f47mj\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940049 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxtmm\" (UniqueName: \"kubernetes.io/projected/83601b22-374e-4d44-85a3-eed233d3ff11-kube-api-access-gxtmm\") pod \"barbican-db-sync-mfhwg\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940065 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-scripts\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940090 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5b558b4d-8398-435d-8925-e36325681252-etc-machine-id\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940134 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-combined-ca-bundle\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940166 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-combined-ca-bundle\") pod \"barbican-db-sync-mfhwg\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940192 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-db-sync-config-data\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940220 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwrmm\" (UniqueName: \"kubernetes.io/projected/5b558b4d-8398-435d-8925-e36325681252-kube-api-access-bwrmm\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940237 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-db-sync-config-data\") pod \"barbican-db-sync-mfhwg\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940261 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-combined-ca-bundle\") pod \"neutron-db-sync-f47mj\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:11 crc kubenswrapper[4685]: I1202 10:20:11.940298 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjphf\" (UniqueName: \"kubernetes.io/projected/a7e4b942-530d-42f2-8ba6-f432991c850d-kube-api-access-qjphf\") pod \"neutron-db-sync-f47mj\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.009629 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" 
event={"ID":"5e80924c-dc2a-4ada-897f-eaf391c8b7d6","Type":"ContainerDied","Data":"8eed82684ee872fd6fffa21c38055cccaedda70b8226144ee84758494cc6a9af"} Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.009676 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-f47mj"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.009699 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mfhwg"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050496 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-combined-ca-bundle\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050578 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-combined-ca-bundle\") pod \"barbican-db-sync-mfhwg\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050610 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-db-sync-config-data\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050645 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwrmm\" (UniqueName: \"kubernetes.io/projected/5b558b4d-8398-435d-8925-e36325681252-kube-api-access-bwrmm\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050666 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-db-sync-config-data\") pod \"barbican-db-sync-mfhwg\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050697 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-combined-ca-bundle\") pod \"neutron-db-sync-f47mj\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050752 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjphf\" (UniqueName: \"kubernetes.io/projected/a7e4b942-530d-42f2-8ba6-f432991c850d-kube-api-access-qjphf\") pod \"neutron-db-sync-f47mj\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050779 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-config-data\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 
10:20:12.050816 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-config\") pod \"neutron-db-sync-f47mj\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050842 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxtmm\" (UniqueName: \"kubernetes.io/projected/83601b22-374e-4d44-85a3-eed233d3ff11-kube-api-access-gxtmm\") pod \"barbican-db-sync-mfhwg\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050859 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-scripts\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050888 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5b558b4d-8398-435d-8925-e36325681252-etc-machine-id\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.050996 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5b558b4d-8398-435d-8925-e36325681252-etc-machine-id\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.065142 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-combined-ca-bundle\") pod \"neutron-db-sync-f47mj\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.067418 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-config-data\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.123894 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-db-sync-config-data\") pod \"barbican-db-sync-mfhwg\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.123908 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-db-sync-config-data\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.135354 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxtmm\" (UniqueName: \"kubernetes.io/projected/83601b22-374e-4d44-85a3-eed233d3ff11-kube-api-access-gxtmm\") pod 
\"barbican-db-sync-mfhwg\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.144052 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjphf\" (UniqueName: \"kubernetes.io/projected/a7e4b942-530d-42f2-8ba6-f432991c850d-kube-api-access-qjphf\") pod \"neutron-db-sync-f47mj\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.146297 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-config\") pod \"neutron-db-sync-f47mj\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.146839 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-combined-ca-bundle\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.147194 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.147227 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.154523 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwrmm\" (UniqueName: \"kubernetes.io/projected/5b558b4d-8398-435d-8925-e36325681252-kube-api-access-bwrmm\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.165182 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-combined-ca-bundle\") pod \"barbican-db-sync-mfhwg\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.165277 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-599497779c-mk6dq"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.168083 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.176735 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-bpvd5" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.176946 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.177155 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.178336 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-scripts\") pod \"cinder-db-sync-xx8rz\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.180515 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.194366 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-599497779c-mk6dq"] Dec 02 10:20:12 crc kubenswrapper[4685]: W1202 10:20:12.195660 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3fe281bb_9242_40f2_9dea_4a0a0fdb066d.slice/crio-9478d1ec264dd77a02728b67732d2adcfe4ad8be41fc8c9d7018c1ff112cc74b WatchSource:0}: Error finding container 9478d1ec264dd77a02728b67732d2adcfe4ad8be41fc8c9d7018c1ff112cc74b: Status 404 returned error can't find the container with id 9478d1ec264dd77a02728b67732d2adcfe4ad8be41fc8c9d7018c1ff112cc74b Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.199924 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.266338 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-scripts\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.266775 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gmxv\" (UniqueName: \"kubernetes.io/projected/1abe19bf-791f-4ca4-a98e-f224511c7614-kube-api-access-5gmxv\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.266855 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1abe19bf-791f-4ca4-a98e-f224511c7614-logs\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.266918 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1abe19bf-791f-4ca4-a98e-f224511c7614-horizon-secret-key\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.266949 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-config-data\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.278034 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.314426 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-9m45k"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.315491 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.322364 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.324510 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.337660 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-xz5hx" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.354935 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-9m45k"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.370735 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-w2zzg"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.371580 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1abe19bf-791f-4ca4-a98e-f224511c7614-logs\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.371621 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s62bt\" (UniqueName: \"kubernetes.io/projected/fcc4c853-7816-4912-9c98-55c29ae90396-kube-api-access-s62bt\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.371658 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-scripts\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.371679 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1abe19bf-791f-4ca4-a98e-f224511c7614-horizon-secret-key\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.371700 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-config-data\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.371737 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-config-data\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.371762 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-scripts\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " 
pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.371786 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-combined-ca-bundle\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.371804 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gmxv\" (UniqueName: \"kubernetes.io/projected/1abe19bf-791f-4ca4-a98e-f224511c7614-kube-api-access-5gmxv\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.371834 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcc4c853-7816-4912-9c98-55c29ae90396-logs\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.372214 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1abe19bf-791f-4ca4-a98e-f224511c7614-logs\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.373111 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-scripts\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.373984 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-config-data\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.394776 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1abe19bf-791f-4ca4-a98e-f224511c7614-horizon-secret-key\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.403014 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.416638 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-qbpr2"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.445434 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gmxv\" (UniqueName: \"kubernetes.io/projected/1abe19bf-791f-4ca4-a98e-f224511c7614-kube-api-access-5gmxv\") pod \"horizon-599497779c-mk6dq\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.453692 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4b9pm"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.470649 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.472915 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-combined-ca-bundle\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.472962 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcc4c853-7816-4912-9c98-55c29ae90396-logs\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.473016 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s62bt\" (UniqueName: \"kubernetes.io/projected/fcc4c853-7816-4912-9c98-55c29ae90396-kube-api-access-s62bt\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.473050 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-scripts\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.473100 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-config-data\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.474398 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcc4c853-7816-4912-9c98-55c29ae90396-logs\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.490803 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-config-data\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 
10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.495629 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-scripts\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.504997 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4b9pm"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.506582 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-combined-ca-bundle\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.518831 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s62bt\" (UniqueName: \"kubernetes.io/projected/fcc4c853-7816-4912-9c98-55c29ae90396-kube-api-access-s62bt\") pod \"placement-db-sync-9m45k\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.544380 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.546752 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.589273 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-dfpbr" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.589640 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.589790 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.589908 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.597158 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.598145 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-9m45k" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.610035 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7dff9fbf89-bfx5f"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.627951 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.628299 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.670976 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7dff9fbf89-bfx5f"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679628 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679670 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-scripts\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679701 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679725 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679763 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-config\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679786 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679846 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xzdp\" (UniqueName: \"kubernetes.io/projected/ae11b944-e16a-4404-94b7-c28fec4c5cc4-kube-api-access-4xzdp\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679868 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-config-data\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679913 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4qmh\" (UniqueName: \"kubernetes.io/projected/d244c39e-7702-4abd-bdf1-8d6331369ff3-kube-api-access-v4qmh\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679944 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.679995 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.680031 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.680056 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.680111 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-logs\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.745745 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.747454 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.752149 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.753932 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.780868 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782592 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-config\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782663 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782715 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xzdp\" (UniqueName: \"kubernetes.io/projected/ae11b944-e16a-4404-94b7-c28fec4c5cc4-kube-api-access-4xzdp\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782734 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-config-data\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782763 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4qmh\" (UniqueName: \"kubernetes.io/projected/d244c39e-7702-4abd-bdf1-8d6331369ff3-kube-api-access-v4qmh\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782786 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782840 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782880 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-config-data\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782905 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782922 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bc68e28-dc5a-4af7-803d-a5ea85de0827-logs\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782942 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782960 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8bc68e28-dc5a-4af7-803d-a5ea85de0827-horizon-secret-key\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.782993 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-scripts\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.783016 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-logs\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.783037 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvj87\" (UniqueName: \"kubernetes.io/projected/8bc68e28-dc5a-4af7-803d-a5ea85de0827-kube-api-access-xvj87\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.783060 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.783097 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.783117 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.783134 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.783491 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-config\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.784138 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-logs\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.784257 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.784755 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.785094 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.785860 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.786777 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 
10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.787242 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.808883 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-scripts\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.813532 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.817443 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xzdp\" (UniqueName: \"kubernetes.io/projected/ae11b944-e16a-4404-94b7-c28fec4c5cc4-kube-api-access-4xzdp\") pod \"dnsmasq-dns-785d8bcb8c-4b9pm\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.818226 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4qmh\" (UniqueName: \"kubernetes.io/projected/d244c39e-7702-4abd-bdf1-8d6331369ff3-kube-api-access-v4qmh\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.824993 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.830688 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-config-data\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.841172 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.854362 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.855182 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.857702 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.857867 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884568 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884616 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-scripts\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884654 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvj87\" (UniqueName: \"kubernetes.io/projected/8bc68e28-dc5a-4af7-803d-a5ea85de0827-kube-api-access-xvj87\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884716 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884753 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884793 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6zzf\" (UniqueName: \"kubernetes.io/projected/e1413696-131d-416c-a29c-9380008922ae-kube-api-access-n6zzf\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884852 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884873 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884895 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-logs\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884920 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-config-data\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884945 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bc68e28-dc5a-4af7-803d-a5ea85de0827-logs\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884964 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.884984 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8bc68e28-dc5a-4af7-803d-a5ea85de0827-horizon-secret-key\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.885385 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-scripts\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.895431 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8bc68e28-dc5a-4af7-803d-a5ea85de0827-horizon-secret-key\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.897369 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bc68e28-dc5a-4af7-803d-a5ea85de0827-logs\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.900087 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-config-data\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " 
pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.900720 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.913448 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.937061 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.956347 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvj87\" (UniqueName: \"kubernetes.io/projected/8bc68e28-dc5a-4af7-803d-a5ea85de0827-kube-api-access-xvj87\") pod \"horizon-7dff9fbf89-bfx5f\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.984567 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w2zzg" event={"ID":"3fe281bb-9242-40f2-9dea-4a0a0fdb066d","Type":"ContainerStarted","Data":"9478d1ec264dd77a02728b67732d2adcfe4ad8be41fc8c9d7018c1ff112cc74b"} Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.986421 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-swift-storage-0\") pod \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.986515 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-sb\") pod \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.986645 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-config\") pod \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.986678 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-nb\") pod \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.986756 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w59h9\" (UniqueName: \"kubernetes.io/projected/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-kube-api-access-w59h9\") pod \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\" (UID: \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.986796 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-svc\") pod \"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\" (UID: 
\"5e80924c-dc2a-4ada-897f-eaf391c8b7d6\") " Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.992851 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6zzf\" (UniqueName: \"kubernetes.io/projected/e1413696-131d-416c-a29c-9380008922ae-kube-api-access-n6zzf\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.992948 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.992980 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993017 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-logs\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993074 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993095 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-scripts\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993139 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-run-httpd\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993159 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk9mm\" (UniqueName: \"kubernetes.io/projected/d644bf2c-8162-4066-b3c0-3751842256c9-kube-api-access-tk9mm\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993174 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993197 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993232 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-config-data\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993287 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993349 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993372 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-log-httpd\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:12 crc kubenswrapper[4685]: I1202 10:20:12.993397 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.007987 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" event={"ID":"5e80924c-dc2a-4ada-897f-eaf391c8b7d6","Type":"ContainerDied","Data":"91fa370fadd71169d7efb78aa1e7f92422d4305f12a86a746750f66726d34b6c"} Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.008085 4685 scope.go:117] "RemoveContainer" containerID="8eed82684ee872fd6fffa21c38055cccaedda70b8226144ee84758494cc6a9af" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.008322 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-z58w5" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.026108 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.026833 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-logs\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.028107 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.042323 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.068956 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-kube-api-access-w59h9" (OuterVolumeSpecName: "kube-api-access-w59h9") pod "5e80924c-dc2a-4ada-897f-eaf391c8b7d6" (UID: "5e80924c-dc2a-4ada-897f-eaf391c8b7d6"). InnerVolumeSpecName "kube-api-access-w59h9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.048624 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.147952 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.148033 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.148671 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.149650 4685 scope.go:117] "RemoveContainer" containerID="c5ab0619537af61e796f0ad9162aa0cc413cbc137e7b3dbd7f0e60ca777de502" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.151581 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-scripts\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.152089 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-run-httpd\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.152156 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk9mm\" (UniqueName: \"kubernetes.io/projected/d644bf2c-8162-4066-b3c0-3751842256c9-kube-api-access-tk9mm\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.152207 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.152266 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-config-data\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.152459 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc 
kubenswrapper[4685]: I1202 10:20:13.152631 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-log-httpd\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.154368 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w59h9\" (UniqueName: \"kubernetes.io/projected/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-kube-api-access-w59h9\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.168284 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6zzf\" (UniqueName: \"kubernetes.io/projected/e1413696-131d-416c-a29c-9380008922ae-kube-api-access-n6zzf\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.181021 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-qbpr2"] Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.184301 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-run-httpd\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.201407 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5e80924c-dc2a-4ada-897f-eaf391c8b7d6" (UID: "5e80924c-dc2a-4ada-897f-eaf391c8b7d6"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.204220 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.221207 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-log-httpd\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.221698 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.222865 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5e80924c-dc2a-4ada-897f-eaf391c8b7d6" (UID: "5e80924c-dc2a-4ada-897f-eaf391c8b7d6"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.224755 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-scripts\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.259513 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.259899 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.259928 4685 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.264744 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-config-data\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.265345 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk9mm\" (UniqueName: \"kubernetes.io/projected/d644bf2c-8162-4066-b3c0-3751842256c9-kube-api-access-tk9mm\") pod \"ceilometer-0\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.310675 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.335637 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-f47mj"] Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.344501 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-xx8rz"] Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.372868 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.423745 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5e80924c-dc2a-4ada-897f-eaf391c8b7d6" (UID: "5e80924c-dc2a-4ada-897f-eaf391c8b7d6"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.451395 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5e80924c-dc2a-4ada-897f-eaf391c8b7d6" (UID: "5e80924c-dc2a-4ada-897f-eaf391c8b7d6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.463529 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.463568 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.479439 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.529926 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-config" (OuterVolumeSpecName: "config") pod "5e80924c-dc2a-4ada-897f-eaf391c8b7d6" (UID: "5e80924c-dc2a-4ada-897f-eaf391c8b7d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.544779 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-mfhwg"] Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.580409 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e80924c-dc2a-4ada-897f-eaf391c8b7d6-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.674381 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-z58w5"] Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.687230 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-z58w5"] Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.694068 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-599497779c-mk6dq"] Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.721783 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-9m45k"] Dec 02 10:20:13 crc kubenswrapper[4685]: W1202 10:20:13.794103 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfcc4c853_7816_4912_9c98_55c29ae90396.slice/crio-33dfe9111cbf027109d054691824231ea48dbfcea72739ecd1636ef142b2be8a WatchSource:0}: Error finding container 33dfe9111cbf027109d054691824231ea48dbfcea72739ecd1636ef142b2be8a: Status 404 returned error can't find the container with id 33dfe9111cbf027109d054691824231ea48dbfcea72739ecd1636ef142b2be8a Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.918661 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e80924c-dc2a-4ada-897f-eaf391c8b7d6" path="/var/lib/kubelet/pods/5e80924c-dc2a-4ada-897f-eaf391c8b7d6/volumes" Dec 02 10:20:13 crc kubenswrapper[4685]: I1202 10:20:13.919543 4685 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4b9pm"] Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.035259 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-f47mj" event={"ID":"a7e4b942-530d-42f2-8ba6-f432991c850d","Type":"ContainerStarted","Data":"861a570cc2c079de5fc578479fd15720d12453e06679d6efe975970bc7ae5b65"} Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.035308 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-f47mj" event={"ID":"a7e4b942-530d-42f2-8ba6-f432991c850d","Type":"ContainerStarted","Data":"021da3518388911d36e80b6b1ac555663f23d66881e24a14dfbc8e23e3bba1ea"} Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.051473 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xx8rz" event={"ID":"5b558b4d-8398-435d-8925-e36325681252","Type":"ContainerStarted","Data":"d885b5d5862014ec23199b58153df4d2b4d21134418dbf324ce405ed74df2ba1"} Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.065280 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-f47mj" podStartSLOduration=3.065261123 podStartE2EDuration="3.065261123s" podCreationTimestamp="2025-12-02 10:20:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:14.063795153 +0000 UTC m=+1106.435569307" watchObservedRunningTime="2025-12-02 10:20:14.065261123 +0000 UTC m=+1106.437035267" Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.067985 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mfhwg" event={"ID":"83601b22-374e-4d44-85a3-eed233d3ff11","Type":"ContainerStarted","Data":"3ec4d5126299057b4efa8b75d84c56df65fd254af4fedce02ba42398480187bb"} Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.074662 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-599497779c-mk6dq" event={"ID":"1abe19bf-791f-4ca4-a98e-f224511c7614","Type":"ContainerStarted","Data":"19133f4a64b93ea36afa435c4520f38cf8fba051f4a9f68d010eadeb2fcac5af"} Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.087886 4685 generic.go:334] "Generic (PLEG): container finished" podID="6e73b95c-e958-4cec-ac51-5548736d68e3" containerID="2f6812433d1a45e5d89157b77ad2b651a169d57bd879d04e468c21241b5d1e98" exitCode=0 Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.087963 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" event={"ID":"6e73b95c-e958-4cec-ac51-5548736d68e3","Type":"ContainerDied","Data":"2f6812433d1a45e5d89157b77ad2b651a169d57bd879d04e468c21241b5d1e98"} Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.087988 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" event={"ID":"6e73b95c-e958-4cec-ac51-5548736d68e3","Type":"ContainerStarted","Data":"fa6b2fd84505146f0858182041888ccf3a6f44b0810a9313dc756478a22d90b9"} Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.108766 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" event={"ID":"ae11b944-e16a-4404-94b7-c28fec4c5cc4","Type":"ContainerStarted","Data":"89fd6f9e3a51d9f67606e7d09f27e02afbfb5b8733b19734d02cc922780c29c1"} Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.118749 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7dff9fbf89-bfx5f"] Dec 02 
10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.130529 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-9m45k" event={"ID":"fcc4c853-7816-4912-9c98-55c29ae90396","Type":"ContainerStarted","Data":"33dfe9111cbf027109d054691824231ea48dbfcea72739ecd1636ef142b2be8a"} Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.148528 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w2zzg" event={"ID":"3fe281bb-9242-40f2-9dea-4a0a0fdb066d","Type":"ContainerStarted","Data":"2d509261c9f62fce41299f0f9b3334a812421998abe7e1d7cb0de9f9bb1e0e16"} Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.193274 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-w2zzg" podStartSLOduration=3.193246078 podStartE2EDuration="3.193246078s" podCreationTimestamp="2025-12-02 10:20:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:14.17146562 +0000 UTC m=+1106.543239774" watchObservedRunningTime="2025-12-02 10:20:14.193246078 +0000 UTC m=+1106.565020252" Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.242935 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.459291 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.685240 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.709679 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-599497779c-mk6dq"] Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.714437 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.860757 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-76569bb57f-cfg2r"] Dec 02 10:20:14 crc kubenswrapper[4685]: E1202 10:20:14.861257 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e80924c-dc2a-4ada-897f-eaf391c8b7d6" containerName="init" Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.861272 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e80924c-dc2a-4ada-897f-eaf391c8b7d6" containerName="init" Dec 02 10:20:14 crc kubenswrapper[4685]: E1202 10:20:14.861287 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e80924c-dc2a-4ada-897f-eaf391c8b7d6" containerName="dnsmasq-dns" Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.861294 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e80924c-dc2a-4ada-897f-eaf391c8b7d6" containerName="dnsmasq-dns" Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.861469 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e80924c-dc2a-4ada-897f-eaf391c8b7d6" containerName="dnsmasq-dns" Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.863599 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.887412 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.951612 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-76569bb57f-cfg2r"] Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.953318 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.985485 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03e46e54-bac0-4966-8d71-e1c353d3b7db-logs\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.985528 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-scripts\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:14 crc kubenswrapper[4685]: I1202 10:20:14.999734 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/03e46e54-bac0-4966-8d71-e1c353d3b7db-horizon-secret-key\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:14.999823 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxb2n\" (UniqueName: \"kubernetes.io/projected/03e46e54-bac0-4966-8d71-e1c353d3b7db-kube-api-access-hxb2n\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:14.999937 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-config-data\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.069607 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.103490 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-config\") pod \"6e73b95c-e958-4cec-ac51-5548736d68e3\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.103546 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-svc\") pod \"6e73b95c-e958-4cec-ac51-5548736d68e3\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.103656 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-swift-storage-0\") pod \"6e73b95c-e958-4cec-ac51-5548736d68e3\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.103719 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-nb\") pod \"6e73b95c-e958-4cec-ac51-5548736d68e3\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.103803 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fg2sp\" (UniqueName: \"kubernetes.io/projected/6e73b95c-e958-4cec-ac51-5548736d68e3-kube-api-access-fg2sp\") pod \"6e73b95c-e958-4cec-ac51-5548736d68e3\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.103836 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-sb\") pod \"6e73b95c-e958-4cec-ac51-5548736d68e3\" (UID: \"6e73b95c-e958-4cec-ac51-5548736d68e3\") " Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.104065 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03e46e54-bac0-4966-8d71-e1c353d3b7db-logs\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.104097 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-scripts\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.105063 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/03e46e54-bac0-4966-8d71-e1c353d3b7db-horizon-secret-key\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.105102 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxb2n\" (UniqueName: \"kubernetes.io/projected/03e46e54-bac0-4966-8d71-e1c353d3b7db-kube-api-access-hxb2n\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.105166 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-config-data\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.106064 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-scripts\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 
crc kubenswrapper[4685]: I1202 10:20:15.106332 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03e46e54-bac0-4966-8d71-e1c353d3b7db-logs\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.106338 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-config-data\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.127017 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/03e46e54-bac0-4966-8d71-e1c353d3b7db-horizon-secret-key\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.129492 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxb2n\" (UniqueName: \"kubernetes.io/projected/03e46e54-bac0-4966-8d71-e1c353d3b7db-kube-api-access-hxb2n\") pod \"horizon-76569bb57f-cfg2r\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.144110 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e73b95c-e958-4cec-ac51-5548736d68e3-kube-api-access-fg2sp" (OuterVolumeSpecName: "kube-api-access-fg2sp") pod "6e73b95c-e958-4cec-ac51-5548736d68e3" (UID: "6e73b95c-e958-4cec-ac51-5548736d68e3"). InnerVolumeSpecName "kube-api-access-fg2sp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.174808 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6e73b95c-e958-4cec-ac51-5548736d68e3" (UID: "6e73b95c-e958-4cec-ac51-5548736d68e3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.201348 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6e73b95c-e958-4cec-ac51-5548736d68e3" (UID: "6e73b95c-e958-4cec-ac51-5548736d68e3"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.206690 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.212085 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.212191 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fg2sp\" (UniqueName: \"kubernetes.io/projected/6e73b95c-e958-4cec-ac51-5548736d68e3-kube-api-access-fg2sp\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.223667 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6e73b95c-e958-4cec-ac51-5548736d68e3" (UID: "6e73b95c-e958-4cec-ac51-5548736d68e3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.228457 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6e73b95c-e958-4cec-ac51-5548736d68e3" (UID: "6e73b95c-e958-4cec-ac51-5548736d68e3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.238586 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-config" (OuterVolumeSpecName: "config") pod "6e73b95c-e958-4cec-ac51-5548736d68e3" (UID: "6e73b95c-e958-4cec-ac51-5548736d68e3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.289196 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.317172 4685 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.317198 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.317208 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e73b95c-e958-4cec-ac51-5548736d68e3-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.365213 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d244c39e-7702-4abd-bdf1-8d6331369ff3","Type":"ContainerStarted","Data":"6ccee2bfed824a2fc0dc2f1a31909ff9877a10b4aed064d38f0d84b27176cacd"} Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.395245 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.402629 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-qbpr2" event={"ID":"6e73b95c-e958-4cec-ac51-5548736d68e3","Type":"ContainerDied","Data":"fa6b2fd84505146f0858182041888ccf3a6f44b0810a9313dc756478a22d90b9"} Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.402679 4685 scope.go:117] "RemoveContainer" containerID="2f6812433d1a45e5d89157b77ad2b651a169d57bd879d04e468c21241b5d1e98" Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.425092 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e1413696-131d-416c-a29c-9380008922ae","Type":"ContainerStarted","Data":"23e1054ef243e1bf00e1af4b95267eb23fe2a72278f34be4947f6ebecb8b6425"} Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.427906 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d644bf2c-8162-4066-b3c0-3751842256c9","Type":"ContainerStarted","Data":"466cdedf8237f4fdc31b7c65f4443ca7566eb8bafa585ff00337163bb29d57d2"} Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.433659 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7dff9fbf89-bfx5f" event={"ID":"8bc68e28-dc5a-4af7-803d-a5ea85de0827","Type":"ContainerStarted","Data":"eadee3402093481b09c286dd7f6709c4864ec9e12ffd779faca0d3ceb84dfdfc"} Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.521015 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-qbpr2"] Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.538256 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-qbpr2"] Dec 02 10:20:15 crc kubenswrapper[4685]: I1202 10:20:15.931135 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e73b95c-e958-4cec-ac51-5548736d68e3" path="/var/lib/kubelet/pods/6e73b95c-e958-4cec-ac51-5548736d68e3/volumes" Dec 02 10:20:16 crc kubenswrapper[4685]: I1202 10:20:16.212812 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-76569bb57f-cfg2r"] Dec 02 10:20:16 crc 
kubenswrapper[4685]: W1202 10:20:16.268869 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03e46e54_bac0_4966_8d71_e1c353d3b7db.slice/crio-5babfe137f8fc88cd9a7f1939e9782fee2dbe29e44c31373f369b9f16c64639d WatchSource:0}: Error finding container 5babfe137f8fc88cd9a7f1939e9782fee2dbe29e44c31373f369b9f16c64639d: Status 404 returned error can't find the container with id 5babfe137f8fc88cd9a7f1939e9782fee2dbe29e44c31373f369b9f16c64639d Dec 02 10:20:16 crc kubenswrapper[4685]: I1202 10:20:16.442830 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76569bb57f-cfg2r" event={"ID":"03e46e54-bac0-4966-8d71-e1c353d3b7db","Type":"ContainerStarted","Data":"5babfe137f8fc88cd9a7f1939e9782fee2dbe29e44c31373f369b9f16c64639d"} Dec 02 10:20:16 crc kubenswrapper[4685]: I1202 10:20:16.447179 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d244c39e-7702-4abd-bdf1-8d6331369ff3","Type":"ContainerStarted","Data":"fbf69608ad2125eaaf1ae1b4d740762d61ee3a2e2f2b48ded23adb4402992496"} Dec 02 10:20:16 crc kubenswrapper[4685]: I1202 10:20:16.454189 4685 generic.go:334] "Generic (PLEG): container finished" podID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" containerID="8260809ae593e36bc6df5306fd4d55954f7843f3949584651ae0f038890131cc" exitCode=0 Dec 02 10:20:16 crc kubenswrapper[4685]: I1202 10:20:16.454241 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" event={"ID":"ae11b944-e16a-4404-94b7-c28fec4c5cc4","Type":"ContainerDied","Data":"8260809ae593e36bc6df5306fd4d55954f7843f3949584651ae0f038890131cc"} Dec 02 10:20:17 crc kubenswrapper[4685]: I1202 10:20:17.492117 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e1413696-131d-416c-a29c-9380008922ae","Type":"ContainerStarted","Data":"730fed644ccbf2d79928dad514f381723a6bb68426630c6c6213c4ffa552d840"} Dec 02 10:20:17 crc kubenswrapper[4685]: I1202 10:20:17.518684 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" event={"ID":"ae11b944-e16a-4404-94b7-c28fec4c5cc4","Type":"ContainerStarted","Data":"577999921edcc5b692f156a00b9dc635423c1554902c40cbf73e4f5bacdc17cd"} Dec 02 10:20:17 crc kubenswrapper[4685]: I1202 10:20:17.518925 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:17 crc kubenswrapper[4685]: I1202 10:20:17.542355 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" podStartSLOduration=5.542334836 podStartE2EDuration="5.542334836s" podCreationTimestamp="2025-12-02 10:20:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:17.541077572 +0000 UTC m=+1109.912851726" watchObservedRunningTime="2025-12-02 10:20:17.542334836 +0000 UTC m=+1109.914108990" Dec 02 10:20:18 crc kubenswrapper[4685]: I1202 10:20:18.531888 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e1413696-131d-416c-a29c-9380008922ae","Type":"ContainerStarted","Data":"4544fd86d56ad967465af5715e121d0b61706c06af07a6aa35a4bb540ffa155d"} Dec 02 10:20:18 crc kubenswrapper[4685]: I1202 10:20:18.532006 4685 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/glance-default-internal-api-0" podUID="e1413696-131d-416c-a29c-9380008922ae" containerName="glance-log" containerID="cri-o://730fed644ccbf2d79928dad514f381723a6bb68426630c6c6213c4ffa552d840" gracePeriod=30 Dec 02 10:20:18 crc kubenswrapper[4685]: I1202 10:20:18.532034 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e1413696-131d-416c-a29c-9380008922ae" containerName="glance-httpd" containerID="cri-o://4544fd86d56ad967465af5715e121d0b61706c06af07a6aa35a4bb540ffa155d" gracePeriod=30 Dec 02 10:20:18 crc kubenswrapper[4685]: I1202 10:20:18.542717 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d244c39e-7702-4abd-bdf1-8d6331369ff3" containerName="glance-log" containerID="cri-o://fbf69608ad2125eaaf1ae1b4d740762d61ee3a2e2f2b48ded23adb4402992496" gracePeriod=30 Dec 02 10:20:18 crc kubenswrapper[4685]: I1202 10:20:18.542720 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d244c39e-7702-4abd-bdf1-8d6331369ff3","Type":"ContainerStarted","Data":"03c97d3f6ac27e6cb6cbc9423329096b0e65f7239400f0b893a6855b32e41a72"} Dec 02 10:20:18 crc kubenswrapper[4685]: I1202 10:20:18.542847 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d244c39e-7702-4abd-bdf1-8d6331369ff3" containerName="glance-httpd" containerID="cri-o://03c97d3f6ac27e6cb6cbc9423329096b0e65f7239400f0b893a6855b32e41a72" gracePeriod=30 Dec 02 10:20:18 crc kubenswrapper[4685]: I1202 10:20:18.564375 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.564357261 podStartE2EDuration="6.564357261s" podCreationTimestamp="2025-12-02 10:20:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:18.558224533 +0000 UTC m=+1110.929998687" watchObservedRunningTime="2025-12-02 10:20:18.564357261 +0000 UTC m=+1110.936131415" Dec 02 10:20:18 crc kubenswrapper[4685]: I1202 10:20:18.598808 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.598785276 podStartE2EDuration="6.598785276s" podCreationTimestamp="2025-12-02 10:20:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:18.586445288 +0000 UTC m=+1110.958219442" watchObservedRunningTime="2025-12-02 10:20:18.598785276 +0000 UTC m=+1110.970559430" Dec 02 10:20:19 crc kubenswrapper[4685]: I1202 10:20:19.572052 4685 generic.go:334] "Generic (PLEG): container finished" podID="e1413696-131d-416c-a29c-9380008922ae" containerID="4544fd86d56ad967465af5715e121d0b61706c06af07a6aa35a4bb540ffa155d" exitCode=0 Dec 02 10:20:19 crc kubenswrapper[4685]: I1202 10:20:19.572470 4685 generic.go:334] "Generic (PLEG): container finished" podID="e1413696-131d-416c-a29c-9380008922ae" containerID="730fed644ccbf2d79928dad514f381723a6bb68426630c6c6213c4ffa552d840" exitCode=143 Dec 02 10:20:19 crc kubenswrapper[4685]: I1202 10:20:19.572108 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"e1413696-131d-416c-a29c-9380008922ae","Type":"ContainerDied","Data":"4544fd86d56ad967465af5715e121d0b61706c06af07a6aa35a4bb540ffa155d"} Dec 02 10:20:19 crc kubenswrapper[4685]: I1202 10:20:19.573327 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e1413696-131d-416c-a29c-9380008922ae","Type":"ContainerDied","Data":"730fed644ccbf2d79928dad514f381723a6bb68426630c6c6213c4ffa552d840"} Dec 02 10:20:19 crc kubenswrapper[4685]: I1202 10:20:19.580908 4685 generic.go:334] "Generic (PLEG): container finished" podID="3fe281bb-9242-40f2-9dea-4a0a0fdb066d" containerID="2d509261c9f62fce41299f0f9b3334a812421998abe7e1d7cb0de9f9bb1e0e16" exitCode=0 Dec 02 10:20:19 crc kubenswrapper[4685]: I1202 10:20:19.580989 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w2zzg" event={"ID":"3fe281bb-9242-40f2-9dea-4a0a0fdb066d","Type":"ContainerDied","Data":"2d509261c9f62fce41299f0f9b3334a812421998abe7e1d7cb0de9f9bb1e0e16"} Dec 02 10:20:19 crc kubenswrapper[4685]: I1202 10:20:19.594938 4685 generic.go:334] "Generic (PLEG): container finished" podID="d244c39e-7702-4abd-bdf1-8d6331369ff3" containerID="03c97d3f6ac27e6cb6cbc9423329096b0e65f7239400f0b893a6855b32e41a72" exitCode=0 Dec 02 10:20:19 crc kubenswrapper[4685]: I1202 10:20:19.594969 4685 generic.go:334] "Generic (PLEG): container finished" podID="d244c39e-7702-4abd-bdf1-8d6331369ff3" containerID="fbf69608ad2125eaaf1ae1b4d740762d61ee3a2e2f2b48ded23adb4402992496" exitCode=143 Dec 02 10:20:19 crc kubenswrapper[4685]: I1202 10:20:19.594983 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d244c39e-7702-4abd-bdf1-8d6331369ff3","Type":"ContainerDied","Data":"03c97d3f6ac27e6cb6cbc9423329096b0e65f7239400f0b893a6855b32e41a72"} Dec 02 10:20:19 crc kubenswrapper[4685]: I1202 10:20:19.595076 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d244c39e-7702-4abd-bdf1-8d6331369ff3","Type":"ContainerDied","Data":"fbf69608ad2125eaaf1ae1b4d740762d61ee3a2e2f2b48ded23adb4402992496"} Dec 02 10:20:20 crc kubenswrapper[4685]: I1202 10:20:20.987374 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7dff9fbf89-bfx5f"] Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.040320 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5cb9cd6bb4-c4gq9"] Dec 02 10:20:21 crc kubenswrapper[4685]: E1202 10:20:21.040770 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e73b95c-e958-4cec-ac51-5548736d68e3" containerName="init" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.040784 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e73b95c-e958-4cec-ac51-5548736d68e3" containerName="init" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.040978 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e73b95c-e958-4cec-ac51-5548736d68e3" containerName="init" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.041962 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.045930 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.072890 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5cb9cd6bb4-c4gq9"] Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.127644 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-config-data\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.127707 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-scripts\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.127750 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whbg4\" (UniqueName: \"kubernetes.io/projected/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-kube-api-access-whbg4\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.127767 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-secret-key\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.127803 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-logs\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.127833 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-combined-ca-bundle\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.127857 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-tls-certs\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.142249 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-76569bb57f-cfg2r"] Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.182439 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-79bf856964-lh2w8"] Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.183921 
4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.232847 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-79bf856964-lh2w8"] Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.234499 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-config-data\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.236318 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-scripts\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.236404 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whbg4\" (UniqueName: \"kubernetes.io/projected/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-kube-api-access-whbg4\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.236426 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-secret-key\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.236494 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-logs\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.236544 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-combined-ca-bundle\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.236595 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-tls-certs\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.246081 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-scripts\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.249427 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-config-data\") pod 
\"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.251998 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-logs\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.281812 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-tls-certs\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.309544 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-secret-key\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.309759 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-combined-ca-bundle\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.310438 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whbg4\" (UniqueName: \"kubernetes.io/projected/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-kube-api-access-whbg4\") pod \"horizon-5cb9cd6bb4-c4gq9\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.343632 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d251819-b2e8-4cc5-b56c-977ea549bf2f-scripts\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.344148 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3d251819-b2e8-4cc5-b56c-977ea549bf2f-horizon-secret-key\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.344312 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d251819-b2e8-4cc5-b56c-977ea549bf2f-logs\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.344389 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3d251819-b2e8-4cc5-b56c-977ea549bf2f-config-data\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " 
pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.344496 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d251819-b2e8-4cc5-b56c-977ea549bf2f-horizon-tls-certs\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.344585 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d251819-b2e8-4cc5-b56c-977ea549bf2f-combined-ca-bundle\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.344679 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdxj4\" (UniqueName: \"kubernetes.io/projected/3d251819-b2e8-4cc5-b56c-977ea549bf2f-kube-api-access-mdxj4\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.373235 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.446536 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d251819-b2e8-4cc5-b56c-977ea549bf2f-logs\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.446871 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3d251819-b2e8-4cc5-b56c-977ea549bf2f-config-data\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.446902 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d251819-b2e8-4cc5-b56c-977ea549bf2f-horizon-tls-certs\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.446930 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d251819-b2e8-4cc5-b56c-977ea549bf2f-combined-ca-bundle\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.446968 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdxj4\" (UniqueName: \"kubernetes.io/projected/3d251819-b2e8-4cc5-b56c-977ea549bf2f-kube-api-access-mdxj4\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.446989 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/3d251819-b2e8-4cc5-b56c-977ea549bf2f-scripts\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.447008 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3d251819-b2e8-4cc5-b56c-977ea549bf2f-horizon-secret-key\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.447605 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d251819-b2e8-4cc5-b56c-977ea549bf2f-logs\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.448289 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3d251819-b2e8-4cc5-b56c-977ea549bf2f-scripts\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.449218 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3d251819-b2e8-4cc5-b56c-977ea549bf2f-config-data\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.453070 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d251819-b2e8-4cc5-b56c-977ea549bf2f-horizon-tls-certs\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.453947 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d251819-b2e8-4cc5-b56c-977ea549bf2f-combined-ca-bundle\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.456479 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3d251819-b2e8-4cc5-b56c-977ea549bf2f-horizon-secret-key\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.469771 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdxj4\" (UniqueName: \"kubernetes.io/projected/3d251819-b2e8-4cc5-b56c-977ea549bf2f-kube-api-access-mdxj4\") pod \"horizon-79bf856964-lh2w8\" (UID: \"3d251819-b2e8-4cc5-b56c-977ea549bf2f\") " pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:21 crc kubenswrapper[4685]: I1202 10:20:21.545609 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:20:22 crc kubenswrapper[4685]: I1202 10:20:22.939104 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:20:23 crc kubenswrapper[4685]: I1202 10:20:23.008094 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-4z2pd"] Dec 02 10:20:23 crc kubenswrapper[4685]: I1202 10:20:23.008828 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-4z2pd" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="dnsmasq-dns" containerID="cri-o://d91d88942af04a2dbe44f8ded4030504a1a921f7253df6947bafaba58dac1f0d" gracePeriod=10 Dec 02 10:20:23 crc kubenswrapper[4685]: I1202 10:20:23.657175 4685 generic.go:334] "Generic (PLEG): container finished" podID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerID="d91d88942af04a2dbe44f8ded4030504a1a921f7253df6947bafaba58dac1f0d" exitCode=0 Dec 02 10:20:23 crc kubenswrapper[4685]: I1202 10:20:23.657463 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-4z2pd" event={"ID":"c0fc2dca-b50f-47c7-b772-e4426cfdda3e","Type":"ContainerDied","Data":"d91d88942af04a2dbe44f8ded4030504a1a921f7253df6947bafaba58dac1f0d"} Dec 02 10:20:24 crc kubenswrapper[4685]: I1202 10:20:24.919515 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-4z2pd" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: connect: connection refused" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.331161 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.463024 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-logs\") pod \"d244c39e-7702-4abd-bdf1-8d6331369ff3\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.463232 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-httpd-run\") pod \"d244c39e-7702-4abd-bdf1-8d6331369ff3\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.463291 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-config-data\") pod \"d244c39e-7702-4abd-bdf1-8d6331369ff3\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.463324 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-combined-ca-bundle\") pod \"d244c39e-7702-4abd-bdf1-8d6331369ff3\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.463357 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"d244c39e-7702-4abd-bdf1-8d6331369ff3\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.463393 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4qmh\" (UniqueName: \"kubernetes.io/projected/d244c39e-7702-4abd-bdf1-8d6331369ff3-kube-api-access-v4qmh\") pod \"d244c39e-7702-4abd-bdf1-8d6331369ff3\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.463448 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-public-tls-certs\") pod \"d244c39e-7702-4abd-bdf1-8d6331369ff3\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.463495 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-scripts\") pod \"d244c39e-7702-4abd-bdf1-8d6331369ff3\" (UID: \"d244c39e-7702-4abd-bdf1-8d6331369ff3\") " Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.463939 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-logs" (OuterVolumeSpecName: "logs") pod "d244c39e-7702-4abd-bdf1-8d6331369ff3" (UID: "d244c39e-7702-4abd-bdf1-8d6331369ff3"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.464161 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d244c39e-7702-4abd-bdf1-8d6331369ff3" (UID: "d244c39e-7702-4abd-bdf1-8d6331369ff3"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.468502 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "d244c39e-7702-4abd-bdf1-8d6331369ff3" (UID: "d244c39e-7702-4abd-bdf1-8d6331369ff3"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.469968 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d244c39e-7702-4abd-bdf1-8d6331369ff3-kube-api-access-v4qmh" (OuterVolumeSpecName: "kube-api-access-v4qmh") pod "d244c39e-7702-4abd-bdf1-8d6331369ff3" (UID: "d244c39e-7702-4abd-bdf1-8d6331369ff3"). InnerVolumeSpecName "kube-api-access-v4qmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.494075 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-scripts" (OuterVolumeSpecName: "scripts") pod "d244c39e-7702-4abd-bdf1-8d6331369ff3" (UID: "d244c39e-7702-4abd-bdf1-8d6331369ff3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.504207 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d244c39e-7702-4abd-bdf1-8d6331369ff3" (UID: "d244c39e-7702-4abd-bdf1-8d6331369ff3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.529135 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d244c39e-7702-4abd-bdf1-8d6331369ff3" (UID: "d244c39e-7702-4abd-bdf1-8d6331369ff3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.537118 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-config-data" (OuterVolumeSpecName: "config-data") pod "d244c39e-7702-4abd-bdf1-8d6331369ff3" (UID: "d244c39e-7702-4abd-bdf1-8d6331369ff3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.565570 4685 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.565598 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.565607 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.565646 4685 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.565656 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4qmh\" (UniqueName: \"kubernetes.io/projected/d244c39e-7702-4abd-bdf1-8d6331369ff3-kube-api-access-v4qmh\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.565665 4685 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.565672 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d244c39e-7702-4abd-bdf1-8d6331369ff3-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.565680 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d244c39e-7702-4abd-bdf1-8d6331369ff3-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.582374 4685 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.667057 4685 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.692005 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d244c39e-7702-4abd-bdf1-8d6331369ff3","Type":"ContainerDied","Data":"6ccee2bfed824a2fc0dc2f1a31909ff9877a10b4aed064d38f0d84b27176cacd"} Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.692063 4685 scope.go:117] "RemoveContainer" containerID="03c97d3f6ac27e6cb6cbc9423329096b0e65f7239400f0b893a6855b32e41a72" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.692090 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.733529 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.756606 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.770042 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:20:27 crc kubenswrapper[4685]: E1202 10:20:27.770491 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d244c39e-7702-4abd-bdf1-8d6331369ff3" containerName="glance-httpd" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.770506 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d244c39e-7702-4abd-bdf1-8d6331369ff3" containerName="glance-httpd" Dec 02 10:20:27 crc kubenswrapper[4685]: E1202 10:20:27.770518 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d244c39e-7702-4abd-bdf1-8d6331369ff3" containerName="glance-log" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.770524 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d244c39e-7702-4abd-bdf1-8d6331369ff3" containerName="glance-log" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.770712 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d244c39e-7702-4abd-bdf1-8d6331369ff3" containerName="glance-httpd" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.770738 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d244c39e-7702-4abd-bdf1-8d6331369ff3" containerName="glance-log" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.771892 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.775054 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.775255 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.780373 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.869188 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rs4c\" (UniqueName: \"kubernetes.io/projected/0fe8705d-216c-4be5-9fd4-41671563b136-kube-api-access-4rs4c\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.869246 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-config-data\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.869367 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.869399 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-scripts\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.869423 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-logs\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.869444 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.869479 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.869508 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.914715 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d244c39e-7702-4abd-bdf1-8d6331369ff3" path="/var/lib/kubelet/pods/d244c39e-7702-4abd-bdf1-8d6331369ff3/volumes" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.974502 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.974579 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.974708 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rs4c\" (UniqueName: \"kubernetes.io/projected/0fe8705d-216c-4be5-9fd4-41671563b136-kube-api-access-4rs4c\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.974799 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-config-data\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.974908 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.974961 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-scripts\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.975012 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-logs\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.975038 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " 
pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.975512 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.977072 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.986358 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-logs\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.989937 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.990198 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-scripts\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.995242 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:27 crc kubenswrapper[4685]: I1202 10:20:27.995863 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rs4c\" (UniqueName: \"kubernetes.io/projected/0fe8705d-216c-4be5-9fd4-41671563b136-kube-api-access-4rs4c\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:28 crc kubenswrapper[4685]: I1202 10:20:28.006550 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-config-data\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:28 crc kubenswrapper[4685]: I1202 10:20:28.021670 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " pod="openstack/glance-default-external-api-0" Dec 02 10:20:28 crc kubenswrapper[4685]: I1202 10:20:28.102637 4685 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 10:20:29 crc kubenswrapper[4685]: E1202 10:20:29.151100 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Dec 02 10:20:29 crc kubenswrapper[4685]: E1202 10:20:29.151574 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s62bt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-9m45k_openstack(fcc4c853-7816-4912-9c98-55c29ae90396): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:20:29 crc kubenswrapper[4685]: E1202 10:20:29.152657 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-9m45k" podUID="fcc4c853-7816-4912-9c98-55c29ae90396" Dec 02 10:20:29 crc kubenswrapper[4685]: E1202 10:20:29.720064 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-9m45k" podUID="fcc4c853-7816-4912-9c98-55c29ae90396" Dec 02 10:20:29 crc 
kubenswrapper[4685]: I1202 10:20:29.920537 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-4z2pd" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: connect: connection refused" Dec 02 10:20:33 crc kubenswrapper[4685]: E1202 10:20:33.716819 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 02 10:20:33 crc kubenswrapper[4685]: E1202 10:20:33.717506 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n54fh7fh59h57ch5d6h66dh65bh5f7h5bfh668h86h5c5h5bch9chc8h6dh64dhc6h696h6dh7bhb5h549h55h5c8h54bh5f9h595h9dhb5h648h55cq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5gmxv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-599497779c-mk6dq_openstack(1abe19bf-791f-4ca4-a98e-f224511c7614): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:20:33 crc kubenswrapper[4685]: E1202 10:20:33.719712 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-599497779c-mk6dq" podUID="1abe19bf-791f-4ca4-a98e-f224511c7614" Dec 02 10:20:33 crc kubenswrapper[4685]: E1202 10:20:33.724425 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 02 10:20:33 crc kubenswrapper[4685]: E1202 10:20:33.724591 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n558h54fh7fh65h5bdh659h5dch565h87h656hdh98h698h669h5f9h67ch59ch579h58dh554hc5h69hb6h74h65bhfch65dhb7h97h5ch577h5f6q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xvj87,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7dff9fbf89-bfx5f_openstack(8bc68e28-dc5a-4af7-803d-a5ea85de0827): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:20:33 crc kubenswrapper[4685]: E1202 10:20:33.726757 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7dff9fbf89-bfx5f" podUID="8bc68e28-dc5a-4af7-803d-a5ea85de0827" Dec 02 10:20:33 crc kubenswrapper[4685]: E1202 10:20:33.756807 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 02 10:20:33 crc kubenswrapper[4685]: E1202 10:20:33.756948 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n97h8h9bh5ddh659hdfh5d7h74h578h585h544h569h544h5cdhbdhcch649h687hc7hcch57chd5h5c6h5b6h75h546h576h7dh587h698h586h5c8q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hxb2n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-76569bb57f-cfg2r_openstack(03e46e54-bac0-4966-8d71-e1c353d3b7db): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:20:33 crc kubenswrapper[4685]: E1202 10:20:33.759731 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-76569bb57f-cfg2r" podUID="03e46e54-bac0-4966-8d71-e1c353d3b7db" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.765979 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-w2zzg" event={"ID":"3fe281bb-9242-40f2-9dea-4a0a0fdb066d","Type":"ContainerDied","Data":"9478d1ec264dd77a02728b67732d2adcfe4ad8be41fc8c9d7018c1ff112cc74b"} Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.767677 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9478d1ec264dd77a02728b67732d2adcfe4ad8be41fc8c9d7018c1ff112cc74b" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.821619 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.881609 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x77b9\" (UniqueName: \"kubernetes.io/projected/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-kube-api-access-x77b9\") pod \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.882322 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-fernet-keys\") pod \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.882855 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-config-data\") pod \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.882912 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-credential-keys\") pod \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.883008 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-scripts\") pod \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.883112 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-combined-ca-bundle\") pod \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\" (UID: \"3fe281bb-9242-40f2-9dea-4a0a0fdb066d\") " Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.902304 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3fe281bb-9242-40f2-9dea-4a0a0fdb066d" (UID: "3fe281bb-9242-40f2-9dea-4a0a0fdb066d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.904063 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3fe281bb-9242-40f2-9dea-4a0a0fdb066d" (UID: "3fe281bb-9242-40f2-9dea-4a0a0fdb066d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.909798 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-kube-api-access-x77b9" (OuterVolumeSpecName: "kube-api-access-x77b9") pod "3fe281bb-9242-40f2-9dea-4a0a0fdb066d" (UID: "3fe281bb-9242-40f2-9dea-4a0a0fdb066d"). InnerVolumeSpecName "kube-api-access-x77b9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.910949 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-scripts" (OuterVolumeSpecName: "scripts") pod "3fe281bb-9242-40f2-9dea-4a0a0fdb066d" (UID: "3fe281bb-9242-40f2-9dea-4a0a0fdb066d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.931271 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3fe281bb-9242-40f2-9dea-4a0a0fdb066d" (UID: "3fe281bb-9242-40f2-9dea-4a0a0fdb066d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.973793 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-config-data" (OuterVolumeSpecName: "config-data") pod "3fe281bb-9242-40f2-9dea-4a0a0fdb066d" (UID: "3fe281bb-9242-40f2-9dea-4a0a0fdb066d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.984876 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.984902 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x77b9\" (UniqueName: \"kubernetes.io/projected/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-kube-api-access-x77b9\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.984916 4685 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.984935 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.984943 4685 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:33 crc kubenswrapper[4685]: I1202 10:20:33.984951 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3fe281bb-9242-40f2-9dea-4a0a0fdb066d-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:34 crc kubenswrapper[4685]: I1202 10:20:34.779540 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-w2zzg" Dec 02 10:20:34 crc kubenswrapper[4685]: I1202 10:20:34.919774 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-4z2pd" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: connect: connection refused" Dec 02 10:20:34 crc kubenswrapper[4685]: I1202 10:20:34.920300 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.022481 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-w2zzg"] Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.030489 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-w2zzg"] Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.117381 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-v9lzr"] Dec 02 10:20:35 crc kubenswrapper[4685]: E1202 10:20:35.117969 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fe281bb-9242-40f2-9dea-4a0a0fdb066d" containerName="keystone-bootstrap" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.117988 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fe281bb-9242-40f2-9dea-4a0a0fdb066d" containerName="keystone-bootstrap" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.118218 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fe281bb-9242-40f2-9dea-4a0a0fdb066d" containerName="keystone-bootstrap" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.118944 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.122323 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.122789 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.123738 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.123738 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.123867 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t7kkp" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.136029 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-v9lzr"] Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.221004 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-combined-ca-bundle\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.221095 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-credential-keys\") pod \"keystone-bootstrap-v9lzr\" (UID: 
\"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.221205 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-fernet-keys\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.221240 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcnpn\" (UniqueName: \"kubernetes.io/projected/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-kube-api-access-qcnpn\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.221282 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-scripts\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.221386 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-config-data\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.322922 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-combined-ca-bundle\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.322971 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-credential-keys\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.323030 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-fernet-keys\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.323063 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcnpn\" (UniqueName: \"kubernetes.io/projected/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-kube-api-access-qcnpn\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.323082 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-scripts\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " 
pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.323108 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-config-data\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.328930 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-scripts\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.330009 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-config-data\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.331574 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-fernet-keys\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.333978 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-credential-keys\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.343614 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-combined-ca-bundle\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.351353 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcnpn\" (UniqueName: \"kubernetes.io/projected/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-kube-api-access-qcnpn\") pod \"keystone-bootstrap-v9lzr\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.449698 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:20:35 crc kubenswrapper[4685]: I1202 10:20:35.920350 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fe281bb-9242-40f2-9dea-4a0a0fdb066d" path="/var/lib/kubelet/pods/3fe281bb-9242-40f2-9dea-4a0a0fdb066d/volumes" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.010670 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.144432 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-httpd-run\") pod \"e1413696-131d-416c-a29c-9380008922ae\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.144542 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6zzf\" (UniqueName: \"kubernetes.io/projected/e1413696-131d-416c-a29c-9380008922ae-kube-api-access-n6zzf\") pod \"e1413696-131d-416c-a29c-9380008922ae\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.144621 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-config-data\") pod \"e1413696-131d-416c-a29c-9380008922ae\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.144670 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-internal-tls-certs\") pod \"e1413696-131d-416c-a29c-9380008922ae\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.144690 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-logs\") pod \"e1413696-131d-416c-a29c-9380008922ae\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.144767 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-scripts\") pod \"e1413696-131d-416c-a29c-9380008922ae\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.144808 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"e1413696-131d-416c-a29c-9380008922ae\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.144842 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-combined-ca-bundle\") pod \"e1413696-131d-416c-a29c-9380008922ae\" (UID: \"e1413696-131d-416c-a29c-9380008922ae\") " Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.144927 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e1413696-131d-416c-a29c-9380008922ae" (UID: "e1413696-131d-416c-a29c-9380008922ae"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.145169 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-logs" (OuterVolumeSpecName: "logs") pod "e1413696-131d-416c-a29c-9380008922ae" (UID: "e1413696-131d-416c-a29c-9380008922ae"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.145596 4685 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.145619 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1413696-131d-416c-a29c-9380008922ae-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.153263 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "e1413696-131d-416c-a29c-9380008922ae" (UID: "e1413696-131d-416c-a29c-9380008922ae"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.153301 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-scripts" (OuterVolumeSpecName: "scripts") pod "e1413696-131d-416c-a29c-9380008922ae" (UID: "e1413696-131d-416c-a29c-9380008922ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.153351 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1413696-131d-416c-a29c-9380008922ae-kube-api-access-n6zzf" (OuterVolumeSpecName: "kube-api-access-n6zzf") pod "e1413696-131d-416c-a29c-9380008922ae" (UID: "e1413696-131d-416c-a29c-9380008922ae"). InnerVolumeSpecName "kube-api-access-n6zzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.178777 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1413696-131d-416c-a29c-9380008922ae" (UID: "e1413696-131d-416c-a29c-9380008922ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.204578 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-config-data" (OuterVolumeSpecName: "config-data") pod "e1413696-131d-416c-a29c-9380008922ae" (UID: "e1413696-131d-416c-a29c-9380008922ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.215671 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e1413696-131d-416c-a29c-9380008922ae" (UID: "e1413696-131d-416c-a29c-9380008922ae"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.247728 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.247794 4685 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.247813 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.247829 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6zzf\" (UniqueName: \"kubernetes.io/projected/e1413696-131d-416c-a29c-9380008922ae-kube-api-access-n6zzf\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.247841 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.247852 4685 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1413696-131d-416c-a29c-9380008922ae-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.270228 4685 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.350187 4685 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.797056 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e1413696-131d-416c-a29c-9380008922ae","Type":"ContainerDied","Data":"23e1054ef243e1bf00e1af4b95267eb23fe2a72278f34be4947f6ebecb8b6425"} Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.797102 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.834473 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.842473 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.862742 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:20:36 crc kubenswrapper[4685]: E1202 10:20:36.863097 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1413696-131d-416c-a29c-9380008922ae" containerName="glance-httpd" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.863115 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1413696-131d-416c-a29c-9380008922ae" containerName="glance-httpd" Dec 02 10:20:36 crc kubenswrapper[4685]: E1202 10:20:36.863144 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1413696-131d-416c-a29c-9380008922ae" containerName="glance-log" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.863152 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1413696-131d-416c-a29c-9380008922ae" containerName="glance-log" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.863312 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1413696-131d-416c-a29c-9380008922ae" containerName="glance-httpd" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.863328 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1413696-131d-416c-a29c-9380008922ae" containerName="glance-log" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.864356 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.870371 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.881515 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.895228 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.963004 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.963078 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.963123 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.963143 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.963169 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.963189 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngt6m\" (UniqueName: \"kubernetes.io/projected/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-kube-api-access-ngt6m\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.963205 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:36 crc kubenswrapper[4685]: I1202 10:20:36.963224 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-logs\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.064421 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.064800 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.064838 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.064873 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngt6m\" (UniqueName: \"kubernetes.io/projected/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-kube-api-access-ngt6m\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.064898 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.064924 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-logs\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.065010 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.065074 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.065491 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.065775 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.066010 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-logs\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.069692 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.070087 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.070132 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.075121 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.088511 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngt6m\" (UniqueName: \"kubernetes.io/projected/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-kube-api-access-ngt6m\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.106773 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.185846 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.806504 4685 generic.go:334] "Generic (PLEG): container finished" podID="a7e4b942-530d-42f2-8ba6-f432991c850d" containerID="861a570cc2c079de5fc578479fd15720d12453e06679d6efe975970bc7ae5b65" exitCode=0 Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.806550 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-f47mj" event={"ID":"a7e4b942-530d-42f2-8ba6-f432991c850d","Type":"ContainerDied","Data":"861a570cc2c079de5fc578479fd15720d12453e06679d6efe975970bc7ae5b65"} Dec 02 10:20:37 crc kubenswrapper[4685]: I1202 10:20:37.909281 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1413696-131d-416c-a29c-9380008922ae" path="/var/lib/kubelet/pods/e1413696-131d-416c-a29c-9380008922ae/volumes" Dec 02 10:20:42 crc kubenswrapper[4685]: I1202 10:20:42.147522 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:20:42 crc kubenswrapper[4685]: I1202 10:20:42.148118 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:20:42 crc kubenswrapper[4685]: I1202 10:20:42.148166 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:20:42 crc kubenswrapper[4685]: I1202 10:20:42.149379 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a4f65c3934f925bd4944d91a313fa6182d1454a9f7eaa524038fcf17bffe22c2"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:20:42 crc kubenswrapper[4685]: I1202 10:20:42.149440 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://a4f65c3934f925bd4944d91a313fa6182d1454a9f7eaa524038fcf17bffe22c2" gracePeriod=600 Dec 02 10:20:42 crc kubenswrapper[4685]: I1202 10:20:42.862813 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="a4f65c3934f925bd4944d91a313fa6182d1454a9f7eaa524038fcf17bffe22c2" exitCode=0 Dec 02 10:20:42 crc kubenswrapper[4685]: I1202 10:20:42.862860 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"a4f65c3934f925bd4944d91a313fa6182d1454a9f7eaa524038fcf17bffe22c2"} Dec 02 10:20:44 crc kubenswrapper[4685]: I1202 10:20:44.919694 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-4z2pd" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o 
timeout" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.057670 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.065013 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.068370 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.169803 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8bc68e28-dc5a-4af7-803d-a5ea85de0827-horizon-secret-key\") pod \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.169896 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bc68e28-dc5a-4af7-803d-a5ea85de0827-logs\") pod \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.169932 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1abe19bf-791f-4ca4-a98e-f224511c7614-logs\") pod \"1abe19bf-791f-4ca4-a98e-f224511c7614\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.169960 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-config-data\") pod \"03e46e54-bac0-4966-8d71-e1c353d3b7db\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.169986 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvj87\" (UniqueName: \"kubernetes.io/projected/8bc68e28-dc5a-4af7-803d-a5ea85de0827-kube-api-access-xvj87\") pod \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.170037 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxb2n\" (UniqueName: \"kubernetes.io/projected/03e46e54-bac0-4966-8d71-e1c353d3b7db-kube-api-access-hxb2n\") pod \"03e46e54-bac0-4966-8d71-e1c353d3b7db\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.170176 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-scripts\") pod \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.170217 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-scripts\") pod \"03e46e54-bac0-4966-8d71-e1c353d3b7db\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.170251 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gmxv\" (UniqueName: 
\"kubernetes.io/projected/1abe19bf-791f-4ca4-a98e-f224511c7614-kube-api-access-5gmxv\") pod \"1abe19bf-791f-4ca4-a98e-f224511c7614\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.170267 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-config-data\") pod \"1abe19bf-791f-4ca4-a98e-f224511c7614\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.170309 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-config-data\") pod \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\" (UID: \"8bc68e28-dc5a-4af7-803d-a5ea85de0827\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.170331 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03e46e54-bac0-4966-8d71-e1c353d3b7db-logs\") pod \"03e46e54-bac0-4966-8d71-e1c353d3b7db\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.170353 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/03e46e54-bac0-4966-8d71-e1c353d3b7db-horizon-secret-key\") pod \"03e46e54-bac0-4966-8d71-e1c353d3b7db\" (UID: \"03e46e54-bac0-4966-8d71-e1c353d3b7db\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.170403 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-scripts\") pod \"1abe19bf-791f-4ca4-a98e-f224511c7614\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.170438 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1abe19bf-791f-4ca4-a98e-f224511c7614-horizon-secret-key\") pod \"1abe19bf-791f-4ca4-a98e-f224511c7614\" (UID: \"1abe19bf-791f-4ca4-a98e-f224511c7614\") " Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.171046 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bc68e28-dc5a-4af7-803d-a5ea85de0827-logs" (OuterVolumeSpecName: "logs") pod "8bc68e28-dc5a-4af7-803d-a5ea85de0827" (UID: "8bc68e28-dc5a-4af7-803d-a5ea85de0827"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.171224 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1abe19bf-791f-4ca4-a98e-f224511c7614-logs" (OuterVolumeSpecName: "logs") pod "1abe19bf-791f-4ca4-a98e-f224511c7614" (UID: "1abe19bf-791f-4ca4-a98e-f224511c7614"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.171458 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-config-data" (OuterVolumeSpecName: "config-data") pod "8bc68e28-dc5a-4af7-803d-a5ea85de0827" (UID: "8bc68e28-dc5a-4af7-803d-a5ea85de0827"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.172086 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03e46e54-bac0-4966-8d71-e1c353d3b7db-logs" (OuterVolumeSpecName: "logs") pod "03e46e54-bac0-4966-8d71-e1c353d3b7db" (UID: "03e46e54-bac0-4966-8d71-e1c353d3b7db"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.172357 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-scripts" (OuterVolumeSpecName: "scripts") pod "8bc68e28-dc5a-4af7-803d-a5ea85de0827" (UID: "8bc68e28-dc5a-4af7-803d-a5ea85de0827"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.172489 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-scripts" (OuterVolumeSpecName: "scripts") pod "1abe19bf-791f-4ca4-a98e-f224511c7614" (UID: "1abe19bf-791f-4ca4-a98e-f224511c7614"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.172588 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-config-data" (OuterVolumeSpecName: "config-data") pod "1abe19bf-791f-4ca4-a98e-f224511c7614" (UID: "1abe19bf-791f-4ca4-a98e-f224511c7614"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.172677 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-scripts" (OuterVolumeSpecName: "scripts") pod "03e46e54-bac0-4966-8d71-e1c353d3b7db" (UID: "03e46e54-bac0-4966-8d71-e1c353d3b7db"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.172716 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8bc68e28-dc5a-4af7-803d-a5ea85de0827-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.172742 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1abe19bf-791f-4ca4-a98e-f224511c7614-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.172755 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.174596 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-config-data" (OuterVolumeSpecName: "config-data") pod "03e46e54-bac0-4966-8d71-e1c353d3b7db" (UID: "03e46e54-bac0-4966-8d71-e1c353d3b7db"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.178518 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1abe19bf-791f-4ca4-a98e-f224511c7614-kube-api-access-5gmxv" (OuterVolumeSpecName: "kube-api-access-5gmxv") pod "1abe19bf-791f-4ca4-a98e-f224511c7614" (UID: "1abe19bf-791f-4ca4-a98e-f224511c7614"). InnerVolumeSpecName "kube-api-access-5gmxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.179844 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1abe19bf-791f-4ca4-a98e-f224511c7614-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "1abe19bf-791f-4ca4-a98e-f224511c7614" (UID: "1abe19bf-791f-4ca4-a98e-f224511c7614"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.180828 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bc68e28-dc5a-4af7-803d-a5ea85de0827-kube-api-access-xvj87" (OuterVolumeSpecName: "kube-api-access-xvj87") pod "8bc68e28-dc5a-4af7-803d-a5ea85de0827" (UID: "8bc68e28-dc5a-4af7-803d-a5ea85de0827"). InnerVolumeSpecName "kube-api-access-xvj87". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.180963 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03e46e54-bac0-4966-8d71-e1c353d3b7db-kube-api-access-hxb2n" (OuterVolumeSpecName: "kube-api-access-hxb2n") pod "03e46e54-bac0-4966-8d71-e1c353d3b7db" (UID: "03e46e54-bac0-4966-8d71-e1c353d3b7db"). InnerVolumeSpecName "kube-api-access-hxb2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.199870 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bc68e28-dc5a-4af7-803d-a5ea85de0827-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "8bc68e28-dc5a-4af7-803d-a5ea85de0827" (UID: "8bc68e28-dc5a-4af7-803d-a5ea85de0827"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.200106 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03e46e54-bac0-4966-8d71-e1c353d3b7db-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "03e46e54-bac0-4966-8d71-e1c353d3b7db" (UID: "03e46e54-bac0-4966-8d71-e1c353d3b7db"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274365 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274416 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gmxv\" (UniqueName: \"kubernetes.io/projected/1abe19bf-791f-4ca4-a98e-f224511c7614-kube-api-access-5gmxv\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274434 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274447 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03e46e54-bac0-4966-8d71-e1c353d3b7db-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274459 4685 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/03e46e54-bac0-4966-8d71-e1c353d3b7db-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274471 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1abe19bf-791f-4ca4-a98e-f224511c7614-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274485 4685 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1abe19bf-791f-4ca4-a98e-f224511c7614-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274500 4685 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8bc68e28-dc5a-4af7-803d-a5ea85de0827-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274512 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/03e46e54-bac0-4966-8d71-e1c353d3b7db-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274522 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvj87\" (UniqueName: \"kubernetes.io/projected/8bc68e28-dc5a-4af7-803d-a5ea85de0827-kube-api-access-xvj87\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274534 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxb2n\" (UniqueName: \"kubernetes.io/projected/03e46e54-bac0-4966-8d71-e1c353d3b7db-kube-api-access-hxb2n\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.274545 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bc68e28-dc5a-4af7-803d-a5ea85de0827-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:48 crc kubenswrapper[4685]: E1202 10:20:48.884727 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Dec 02 10:20:48 crc 
kubenswrapper[4685]: E1202 10:20:48.885176 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n69h5ch654h575h597h549hchb8hc6hc6h5fhdfh6fh5ch9dh685h56dh578h5d7h664h5bbh87h59bh6dh64h5c6h59fh688h59ch5bfh9chb8q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tk9mm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(d644bf2c-8162-4066-b3c0-3751842256c9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.923991 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-76569bb57f-cfg2r" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.924013 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-76569bb57f-cfg2r" event={"ID":"03e46e54-bac0-4966-8d71-e1c353d3b7db","Type":"ContainerDied","Data":"5babfe137f8fc88cd9a7f1939e9782fee2dbe29e44c31373f369b9f16c64639d"} Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.925495 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-599497779c-mk6dq" event={"ID":"1abe19bf-791f-4ca4-a98e-f224511c7614","Type":"ContainerDied","Data":"19133f4a64b93ea36afa435c4520f38cf8fba051f4a9f68d010eadeb2fcac5af"} Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.925610 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-599497779c-mk6dq" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.934296 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-4z2pd" event={"ID":"c0fc2dca-b50f-47c7-b772-e4426cfdda3e","Type":"ContainerDied","Data":"cd3c6ed87da3b0d1c93a5e70b46a8f9d8033a76231b29f1ec51ccf1efeb85fba"} Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.934339 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd3c6ed87da3b0d1c93a5e70b46a8f9d8033a76231b29f1ec51ccf1efeb85fba" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.938792 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-f47mj" event={"ID":"a7e4b942-530d-42f2-8ba6-f432991c850d","Type":"ContainerDied","Data":"021da3518388911d36e80b6b1ac555663f23d66881e24a14dfbc8e23e3bba1ea"} Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.938823 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="021da3518388911d36e80b6b1ac555663f23d66881e24a14dfbc8e23e3bba1ea" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.940598 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7dff9fbf89-bfx5f" event={"ID":"8bc68e28-dc5a-4af7-803d-a5ea85de0827","Type":"ContainerDied","Data":"eadee3402093481b09c286dd7f6709c4864ec9e12ffd779faca0d3ceb84dfdfc"} Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.940673 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7dff9fbf89-bfx5f" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.941857 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:48 crc kubenswrapper[4685]: I1202 10:20:48.950740 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.060366 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-599497779c-mk6dq"] Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.107836 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-599497779c-mk6dq"] Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.116101 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-config\") pod \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.116307 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4z95\" (UniqueName: \"kubernetes.io/projected/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-kube-api-access-b4z95\") pod \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.116380 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-nb\") pod \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.116474 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-sb\") pod \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.116633 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjphf\" (UniqueName: \"kubernetes.io/projected/a7e4b942-530d-42f2-8ba6-f432991c850d-kube-api-access-qjphf\") pod \"a7e4b942-530d-42f2-8ba6-f432991c850d\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.116691 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-dns-svc\") pod \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\" (UID: \"c0fc2dca-b50f-47c7-b772-e4426cfdda3e\") " Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.116744 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-config\") pod \"a7e4b942-530d-42f2-8ba6-f432991c850d\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.116782 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-combined-ca-bundle\") pod \"a7e4b942-530d-42f2-8ba6-f432991c850d\" (UID: \"a7e4b942-530d-42f2-8ba6-f432991c850d\") " Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.148583 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-kube-api-access-b4z95" (OuterVolumeSpecName: "kube-api-access-b4z95") pod "c0fc2dca-b50f-47c7-b772-e4426cfdda3e" (UID: 
"c0fc2dca-b50f-47c7-b772-e4426cfdda3e"). InnerVolumeSpecName "kube-api-access-b4z95". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.152678 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7e4b942-530d-42f2-8ba6-f432991c850d-kube-api-access-qjphf" (OuterVolumeSpecName: "kube-api-access-qjphf") pod "a7e4b942-530d-42f2-8ba6-f432991c850d" (UID: "a7e4b942-530d-42f2-8ba6-f432991c850d"). InnerVolumeSpecName "kube-api-access-qjphf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.156882 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7dff9fbf89-bfx5f"] Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.167357 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7dff9fbf89-bfx5f"] Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.178463 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-config" (OuterVolumeSpecName: "config") pod "a7e4b942-530d-42f2-8ba6-f432991c850d" (UID: "a7e4b942-530d-42f2-8ba6-f432991c850d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.190868 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-76569bb57f-cfg2r"] Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.191807 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c0fc2dca-b50f-47c7-b772-e4426cfdda3e" (UID: "c0fc2dca-b50f-47c7-b772-e4426cfdda3e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.195596 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7e4b942-530d-42f2-8ba6-f432991c850d" (UID: "a7e4b942-530d-42f2-8ba6-f432991c850d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.198571 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-config" (OuterVolumeSpecName: "config") pod "c0fc2dca-b50f-47c7-b772-e4426cfdda3e" (UID: "c0fc2dca-b50f-47c7-b772-e4426cfdda3e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.203161 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-76569bb57f-cfg2r"] Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.207936 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c0fc2dca-b50f-47c7-b772-e4426cfdda3e" (UID: "c0fc2dca-b50f-47c7-b772-e4426cfdda3e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.223040 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c0fc2dca-b50f-47c7-b772-e4426cfdda3e" (UID: "c0fc2dca-b50f-47c7-b772-e4426cfdda3e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.224584 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.224780 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4z95\" (UniqueName: \"kubernetes.io/projected/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-kube-api-access-b4z95\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.224943 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.225047 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.225141 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjphf\" (UniqueName: \"kubernetes.io/projected/a7e4b942-530d-42f2-8ba6-f432991c850d-kube-api-access-qjphf\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.225235 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0fc2dca-b50f-47c7-b772-e4426cfdda3e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.225333 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.225600 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7e4b942-530d-42f2-8ba6-f432991c850d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.910184 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03e46e54-bac0-4966-8d71-e1c353d3b7db" path="/var/lib/kubelet/pods/03e46e54-bac0-4966-8d71-e1c353d3b7db/volumes" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.911281 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1abe19bf-791f-4ca4-a98e-f224511c7614" path="/var/lib/kubelet/pods/1abe19bf-791f-4ca4-a98e-f224511c7614/volumes" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.911914 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bc68e28-dc5a-4af7-803d-a5ea85de0827" path="/var/lib/kubelet/pods/8bc68e28-dc5a-4af7-803d-a5ea85de0827/volumes" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.920282 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-4z2pd" 
podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: i/o timeout" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.947109 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-4z2pd" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.947109 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-f47mj" Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.983014 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-4z2pd"] Dec 02 10:20:49 crc kubenswrapper[4685]: I1202 10:20:49.994365 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-4z2pd"] Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.214624 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-lcv4t"] Dec 02 10:20:50 crc kubenswrapper[4685]: E1202 10:20:50.215097 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7e4b942-530d-42f2-8ba6-f432991c850d" containerName="neutron-db-sync" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.215114 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7e4b942-530d-42f2-8ba6-f432991c850d" containerName="neutron-db-sync" Dec 02 10:20:50 crc kubenswrapper[4685]: E1202 10:20:50.215135 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="init" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.215143 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="init" Dec 02 10:20:50 crc kubenswrapper[4685]: E1202 10:20:50.215154 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="dnsmasq-dns" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.215161 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="dnsmasq-dns" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.215363 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" containerName="dnsmasq-dns" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.215384 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7e4b942-530d-42f2-8ba6-f432991c850d" containerName="neutron-db-sync" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.216338 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.226234 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-lcv4t"] Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.282682 4685 scope.go:117] "RemoveContainer" containerID="fbf69608ad2125eaaf1ae1b4d740762d61ee3a2e2f2b48ded23adb4402992496" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.342436 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6d7f7b7954-l6dql"] Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.344230 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.346047 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.346086 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhmdh\" (UniqueName: \"kubernetes.io/projected/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-kube-api-access-vhmdh\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.346115 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.346186 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.346210 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-svc\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.346244 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-config\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.347017 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.347027 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.347247 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-p568z" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.347407 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.365387 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d7f7b7954-l6dql"] Dec 02 10:20:50 crc kubenswrapper[4685]: E1202 10:20:50.379239 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 02 10:20:50 crc kubenswrapper[4685]: E1202 10:20:50.379400 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bwrmm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-xx8rz_openstack(5b558b4d-8398-435d-8925-e36325681252): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:20:50 crc kubenswrapper[4685]: E1202 10:20:50.380760 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-xx8rz" podUID="5b558b4d-8398-435d-8925-e36325681252" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.447505 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqs68\" (UniqueName: \"kubernetes.io/projected/129679f9-1a88-4185-aafd-512749132f28-kube-api-access-gqs68\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 
10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.447548 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-combined-ca-bundle\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.447623 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.447645 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhmdh\" (UniqueName: \"kubernetes.io/projected/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-kube-api-access-vhmdh\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.447671 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.447698 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-httpd-config\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.447741 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.447757 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-ovndb-tls-certs\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.447775 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-svc\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.447817 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-config\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc 
kubenswrapper[4685]: I1202 10:20:50.454791 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-config\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.462509 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.463299 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.463954 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.464433 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-svc\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.464990 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-config\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.488328 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhmdh\" (UniqueName: \"kubernetes.io/projected/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-kube-api-access-vhmdh\") pod \"dnsmasq-dns-55f844cf75-lcv4t\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.553976 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.556830 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-config\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.556939 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-combined-ca-bundle\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.556986 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqs68\" (UniqueName: \"kubernetes.io/projected/129679f9-1a88-4185-aafd-512749132f28-kube-api-access-gqs68\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.557069 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-httpd-config\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.557150 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-ovndb-tls-certs\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.564828 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-ovndb-tls-certs\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.568379 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-combined-ca-bundle\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.571302 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-httpd-config\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.574942 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-config\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.605577 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-gqs68\" (UniqueName: \"kubernetes.io/projected/129679f9-1a88-4185-aafd-512749132f28-kube-api-access-gqs68\") pod \"neutron-6d7f7b7954-l6dql\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.609840 4685 scope.go:117] "RemoveContainer" containerID="4544fd86d56ad967465af5715e121d0b61706c06af07a6aa35a4bb540ffa155d" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.684265 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:50 crc kubenswrapper[4685]: I1202 10:20:50.890920 4685 scope.go:117] "RemoveContainer" containerID="730fed644ccbf2d79928dad514f381723a6bb68426630c6c6213c4ffa552d840" Dec 02 10:20:51 crc kubenswrapper[4685]: E1202 10:20:51.039220 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-xx8rz" podUID="5b558b4d-8398-435d-8925-e36325681252" Dec 02 10:20:51 crc kubenswrapper[4685]: I1202 10:20:51.040401 4685 scope.go:117] "RemoveContainer" containerID="655120d7a1362181f84e921b333c967e14aabc7193e6f3de840afaf3bc97e69a" Dec 02 10:20:51 crc kubenswrapper[4685]: I1202 10:20:51.410367 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:20:51 crc kubenswrapper[4685]: W1202 10:20:51.416797 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fe8705d_216c_4be5_9fd4_41671563b136.slice/crio-99eed0d2234972e58ae99006c27a77a7c7a436c60a5c88bf00af184c1493f271 WatchSource:0}: Error finding container 99eed0d2234972e58ae99006c27a77a7c7a436c60a5c88bf00af184c1493f271: Status 404 returned error can't find the container with id 99eed0d2234972e58ae99006c27a77a7c7a436c60a5c88bf00af184c1493f271 Dec 02 10:20:51 crc kubenswrapper[4685]: I1202 10:20:51.477686 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-v9lzr"] Dec 02 10:20:51 crc kubenswrapper[4685]: I1202 10:20:51.521053 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 02 10:20:51 crc kubenswrapper[4685]: I1202 10:20:51.649389 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5cb9cd6bb4-c4gq9"] Dec 02 10:20:51 crc kubenswrapper[4685]: W1202 10:20:51.718855 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d251819_b2e8_4cc5_b56c_977ea549bf2f.slice/crio-b08f636a303433f1c44c95cdfc08eb55ec8a8c262b3180a62fc46f14ce9284af WatchSource:0}: Error finding container b08f636a303433f1c44c95cdfc08eb55ec8a8c262b3180a62fc46f14ce9284af: Status 404 returned error can't find the container with id b08f636a303433f1c44c95cdfc08eb55ec8a8c262b3180a62fc46f14ce9284af Dec 02 10:20:51 crc kubenswrapper[4685]: W1202 10:20:51.724710 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04ab41a3_3d00_4fbd_8e1f_2995658eba9c.slice/crio-ae8622a1340c48e9a93f900675a3c7c247040c99a622c3406731f00f8f1a40c7 WatchSource:0}: Error finding container ae8622a1340c48e9a93f900675a3c7c247040c99a622c3406731f00f8f1a40c7: Status 404 returned error can't find 
the container with id ae8622a1340c48e9a93f900675a3c7c247040c99a622c3406731f00f8f1a40c7 Dec 02 10:20:51 crc kubenswrapper[4685]: I1202 10:20:51.735680 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-79bf856964-lh2w8"] Dec 02 10:20:51 crc kubenswrapper[4685]: I1202 10:20:51.765956 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-lcv4t"] Dec 02 10:20:51 crc kubenswrapper[4685]: W1202 10:20:51.783512 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff77bc38_e9d1_400d_92cf_eb2154f26fa3.slice/crio-c1fef34587cf1ae16b3962cc270d7da7f064158ddec071d6edbf5f6a1f693377 WatchSource:0}: Error finding container c1fef34587cf1ae16b3962cc270d7da7f064158ddec071d6edbf5f6a1f693377: Status 404 returned error can't find the container with id c1fef34587cf1ae16b3962cc270d7da7f064158ddec071d6edbf5f6a1f693377 Dec 02 10:20:51 crc kubenswrapper[4685]: I1202 10:20:51.887358 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:20:51 crc kubenswrapper[4685]: I1202 10:20:51.923926 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0fc2dca-b50f-47c7-b772-e4426cfdda3e" path="/var/lib/kubelet/pods/c0fc2dca-b50f-47c7-b772-e4426cfdda3e/volumes" Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.058947 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79bf856964-lh2w8" event={"ID":"3d251819-b2e8-4cc5-b56c-977ea549bf2f","Type":"ContainerStarted","Data":"b08f636a303433f1c44c95cdfc08eb55ec8a8c262b3180a62fc46f14ce9284af"} Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.060958 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0fe8705d-216c-4be5-9fd4-41671563b136","Type":"ContainerStarted","Data":"99eed0d2234972e58ae99006c27a77a7c7a436c60a5c88bf00af184c1493f271"} Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.063222 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mfhwg" event={"ID":"83601b22-374e-4d44-85a3-eed233d3ff11","Type":"ContainerStarted","Data":"ee91c84bd6126d401419888b4d3ddd01067c50bb40b385ac1c8c9b640048313b"} Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.072506 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-9m45k" event={"ID":"fcc4c853-7816-4912-9c98-55c29ae90396","Type":"ContainerStarted","Data":"7515c3e25a47861ece92cfebe06534215c8bd020b4182bba15c432640bc597e7"} Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.096137 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-mfhwg" podStartSLOduration=5.866214809 podStartE2EDuration="41.096116417s" podCreationTimestamp="2025-12-02 10:20:11 +0000 UTC" firstStartedPulling="2025-12-02 10:20:13.631987335 +0000 UTC m=+1106.003761489" lastFinishedPulling="2025-12-02 10:20:48.861888943 +0000 UTC m=+1141.233663097" observedRunningTime="2025-12-02 10:20:52.078439071 +0000 UTC m=+1144.450213215" watchObservedRunningTime="2025-12-02 10:20:52.096116417 +0000 UTC m=+1144.467890571" Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.106069 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" 
event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"b848acb1e4ea5bfba36b62a7bd4c00f067f1b500433e3bc3d9d5085e521463fd"} Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.114002 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d7f7b7954-l6dql"] Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.118945 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-9m45k" podStartSLOduration=3.391550984 podStartE2EDuration="40.118924653s" podCreationTimestamp="2025-12-02 10:20:12 +0000 UTC" firstStartedPulling="2025-12-02 10:20:13.804131533 +0000 UTC m=+1106.175905687" lastFinishedPulling="2025-12-02 10:20:50.531505202 +0000 UTC m=+1142.903279356" observedRunningTime="2025-12-02 10:20:52.106203134 +0000 UTC m=+1144.477977308" watchObservedRunningTime="2025-12-02 10:20:52.118924653 +0000 UTC m=+1144.490698807" Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.126802 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb9cd6bb4-c4gq9" event={"ID":"04ab41a3-3d00-4fbd-8e1f-2995658eba9c","Type":"ContainerStarted","Data":"ae8622a1340c48e9a93f900675a3c7c247040c99a622c3406731f00f8f1a40c7"} Dec 02 10:20:52 crc kubenswrapper[4685]: W1202 10:20:52.168701 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod129679f9_1a88_4185_aafd_512749132f28.slice/crio-2000381ca6445eb68a99d2b1ea1830ff31f891963539a79120d5c1b08c16ae36 WatchSource:0}: Error finding container 2000381ca6445eb68a99d2b1ea1830ff31f891963539a79120d5c1b08c16ae36: Status 404 returned error can't find the container with id 2000381ca6445eb68a99d2b1ea1830ff31f891963539a79120d5c1b08c16ae36 Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.189750 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v9lzr" event={"ID":"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362","Type":"ContainerStarted","Data":"39fc204b37bc512d209df273924f31f179e3021fb8c5bd1a3cefe9147e61f1bf"} Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.196947 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" event={"ID":"ff77bc38-e9d1-400d-92cf-eb2154f26fa3","Type":"ContainerStarted","Data":"c1fef34587cf1ae16b3962cc270d7da7f064158ddec071d6edbf5f6a1f693377"} Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.200440 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345","Type":"ContainerStarted","Data":"85bf843c6e7c5c6b457258b1111a9ac6219f2c174a04d655f376dece2a23f152"} Dec 02 10:20:52 crc kubenswrapper[4685]: I1202 10:20:52.210376 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-v9lzr" podStartSLOduration=17.210355425 podStartE2EDuration="17.210355425s" podCreationTimestamp="2025-12-02 10:20:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:52.207065883 +0000 UTC m=+1144.578840047" watchObservedRunningTime="2025-12-02 10:20:52.210355425 +0000 UTC m=+1144.582129579" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.213299 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79bf856964-lh2w8" 
event={"ID":"3d251819-b2e8-4cc5-b56c-977ea549bf2f","Type":"ContainerStarted","Data":"e8d6fb2c6ab7cc32b1b22f32770a269771d4c9013806cf040cb6ef22563fc916"} Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.218540 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0fe8705d-216c-4be5-9fd4-41671563b136","Type":"ContainerStarted","Data":"8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11"} Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.225713 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7f7b7954-l6dql" event={"ID":"129679f9-1a88-4185-aafd-512749132f28","Type":"ContainerStarted","Data":"ca4cbbbc5e5146fcac5adb5f35ce091207ae56bd84e1efe4b8597de362c39d86"} Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.225783 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7f7b7954-l6dql" event={"ID":"129679f9-1a88-4185-aafd-512749132f28","Type":"ContainerStarted","Data":"2000381ca6445eb68a99d2b1ea1830ff31f891963539a79120d5c1b08c16ae36"} Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.235746 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v9lzr" event={"ID":"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362","Type":"ContainerStarted","Data":"ed21cd4795c0f484c817670e9329fabd31dff1365d9260d1fdacf9f928618d6d"} Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.239142 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6dc8d84c85-x99fw"] Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.240453 4685 generic.go:334] "Generic (PLEG): container finished" podID="ff77bc38-e9d1-400d-92cf-eb2154f26fa3" containerID="eead65235fb4d2aa5aae35b3e65ac543c0d79a20e1a1e64c32e762f1f3f6b479" exitCode=0 Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.240540 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" event={"ID":"ff77bc38-e9d1-400d-92cf-eb2154f26fa3","Type":"ContainerDied","Data":"eead65235fb4d2aa5aae35b3e65ac543c0d79a20e1a1e64c32e762f1f3f6b479"} Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.240620 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.244266 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.244462 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.256059 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345","Type":"ContainerStarted","Data":"7e9a07892190aef654529f902cb94df662ac02e02b87c4aaa33410a8534f0a95"} Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.258073 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6dc8d84c85-x99fw"] Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.357916 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-internal-tls-certs\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.358122 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxcjm\" (UniqueName: \"kubernetes.io/projected/c4743ed7-a0f7-46b1-b0d7-50828835440e-kube-api-access-fxcjm\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.358243 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-combined-ca-bundle\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.358324 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-public-tls-certs\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.358401 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-ovndb-tls-certs\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.358468 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-httpd-config\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.359122 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-config\") 
pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.461002 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-config\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.461054 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-internal-tls-certs\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.461085 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxcjm\" (UniqueName: \"kubernetes.io/projected/c4743ed7-a0f7-46b1-b0d7-50828835440e-kube-api-access-fxcjm\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.461147 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-combined-ca-bundle\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.461171 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-public-tls-certs\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.461189 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-ovndb-tls-certs\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.461296 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-httpd-config\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.495775 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-httpd-config\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.505534 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-ovndb-tls-certs\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc 
kubenswrapper[4685]: I1202 10:20:53.506347 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-public-tls-certs\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.506507 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-internal-tls-certs\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.507325 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-config\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.508484 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxcjm\" (UniqueName: \"kubernetes.io/projected/c4743ed7-a0f7-46b1-b0d7-50828835440e-kube-api-access-fxcjm\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.508918 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4743ed7-a0f7-46b1-b0d7-50828835440e-combined-ca-bundle\") pod \"neutron-6dc8d84c85-x99fw\" (UID: \"c4743ed7-a0f7-46b1-b0d7-50828835440e\") " pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:53 crc kubenswrapper[4685]: I1202 10:20:53.643044 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.282271 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb9cd6bb4-c4gq9" event={"ID":"04ab41a3-3d00-4fbd-8e1f-2995658eba9c","Type":"ContainerStarted","Data":"783c44c9fca5d937d5d9b266ba27638bc6b105439e274191f3b13648452d42f3"} Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.285884 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-79bf856964-lh2w8" event={"ID":"3d251819-b2e8-4cc5-b56c-977ea549bf2f","Type":"ContainerStarted","Data":"c413ecbe39011d4fc6bbe1ab418a191a667a559868ea69d01111a32cf43a5f2c"} Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.298358 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7f7b7954-l6dql" event={"ID":"129679f9-1a88-4185-aafd-512749132f28","Type":"ContainerStarted","Data":"52fcf384c1da3a272b227f8dd9b367730d65904701f4c6fb9e2172653caccde2"} Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.298647 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.316830 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d644bf2c-8162-4066-b3c0-3751842256c9","Type":"ContainerStarted","Data":"b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc"} Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.327689 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-79bf856964-lh2w8" podStartSLOduration=32.717729936 podStartE2EDuration="33.327667086s" podCreationTimestamp="2025-12-02 10:20:21 +0000 UTC" firstStartedPulling="2025-12-02 10:20:51.724869592 +0000 UTC m=+1144.096643746" lastFinishedPulling="2025-12-02 10:20:52.334806742 +0000 UTC m=+1144.706580896" observedRunningTime="2025-12-02 10:20:54.312886681 +0000 UTC m=+1146.684660835" watchObservedRunningTime="2025-12-02 10:20:54.327667086 +0000 UTC m=+1146.699441240" Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.381440 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6d7f7b7954-l6dql" podStartSLOduration=4.381403973 podStartE2EDuration="4.381403973s" podCreationTimestamp="2025-12-02 10:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:54.365856286 +0000 UTC m=+1146.737630440" watchObservedRunningTime="2025-12-02 10:20:54.381403973 +0000 UTC m=+1146.753178127" Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.490931 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" event={"ID":"ff77bc38-e9d1-400d-92cf-eb2154f26fa3","Type":"ContainerStarted","Data":"89b762077eee8992ff6f87fd1076e1cb5071386f567adede8898e8113a1fe933"} Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.491198 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.553249 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" podStartSLOduration=4.553217511 podStartE2EDuration="4.553217511s" podCreationTimestamp="2025-12-02 10:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2025-12-02 10:20:54.531093973 +0000 UTC m=+1146.902868127" watchObservedRunningTime="2025-12-02 10:20:54.553217511 +0000 UTC m=+1146.924991665" Dec 02 10:20:54 crc kubenswrapper[4685]: I1202 10:20:54.598495 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6dc8d84c85-x99fw"] Dec 02 10:20:55 crc kubenswrapper[4685]: I1202 10:20:55.505628 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0fe8705d-216c-4be5-9fd4-41671563b136","Type":"ContainerStarted","Data":"2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1"} Dec 02 10:20:55 crc kubenswrapper[4685]: I1202 10:20:55.509854 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345","Type":"ContainerStarted","Data":"cc9c0b34e6fe1faa8b475b98c4e603f609064439c55db34adb4078339b6d282b"} Dec 02 10:20:55 crc kubenswrapper[4685]: I1202 10:20:55.513964 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6dc8d84c85-x99fw" event={"ID":"c4743ed7-a0f7-46b1-b0d7-50828835440e","Type":"ContainerStarted","Data":"0d348352b263ae893591a7575e272c7e9ad8cc648ea41e7771fa0532b0e77e9a"} Dec 02 10:20:55 crc kubenswrapper[4685]: I1202 10:20:55.514004 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6dc8d84c85-x99fw" event={"ID":"c4743ed7-a0f7-46b1-b0d7-50828835440e","Type":"ContainerStarted","Data":"e4242f041f57a0e944ca03ae02d068e215aa117f8227b4d501e690746a087670"} Dec 02 10:20:55 crc kubenswrapper[4685]: I1202 10:20:55.519422 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb9cd6bb4-c4gq9" event={"ID":"04ab41a3-3d00-4fbd-8e1f-2995658eba9c","Type":"ContainerStarted","Data":"e30782b4f5791754d39c2ed2d23d4a30b74d4fd535fd8045c89bbffd9d86c01d"} Dec 02 10:20:55 crc kubenswrapper[4685]: I1202 10:20:55.524982 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=28.524963196 podStartE2EDuration="28.524963196s" podCreationTimestamp="2025-12-02 10:20:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:55.521183401 +0000 UTC m=+1147.892957555" watchObservedRunningTime="2025-12-02 10:20:55.524963196 +0000 UTC m=+1147.896737350" Dec 02 10:20:55 crc kubenswrapper[4685]: I1202 10:20:55.570622 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5cb9cd6bb4-c4gq9" podStartSLOduration=33.807545255 podStartE2EDuration="35.570601509s" podCreationTimestamp="2025-12-02 10:20:20 +0000 UTC" firstStartedPulling="2025-12-02 10:20:51.765668293 +0000 UTC m=+1144.137442447" lastFinishedPulling="2025-12-02 10:20:53.528724547 +0000 UTC m=+1145.900498701" observedRunningTime="2025-12-02 10:20:55.557716764 +0000 UTC m=+1147.929490918" watchObservedRunningTime="2025-12-02 10:20:55.570601509 +0000 UTC m=+1147.942375663" Dec 02 10:20:55 crc kubenswrapper[4685]: I1202 10:20:55.588047 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=19.588025407 podStartE2EDuration="19.588025407s" podCreationTimestamp="2025-12-02 10:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:55.579433121 
+0000 UTC m=+1147.951207285" watchObservedRunningTime="2025-12-02 10:20:55.588025407 +0000 UTC m=+1147.959799551" Dec 02 10:20:57 crc kubenswrapper[4685]: I1202 10:20:57.186531 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:57 crc kubenswrapper[4685]: I1202 10:20:57.186858 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:57 crc kubenswrapper[4685]: I1202 10:20:57.217452 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:57 crc kubenswrapper[4685]: I1202 10:20:57.251368 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:57 crc kubenswrapper[4685]: I1202 10:20:57.549060 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6dc8d84c85-x99fw" event={"ID":"c4743ed7-a0f7-46b1-b0d7-50828835440e","Type":"ContainerStarted","Data":"77faad807928f0f57b9be37e1e18e1bf3cf926cc2858cefd08f0b16f870817de"} Dec 02 10:20:57 crc kubenswrapper[4685]: I1202 10:20:57.549456 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:57 crc kubenswrapper[4685]: I1202 10:20:57.549673 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 10:20:58 crc kubenswrapper[4685]: I1202 10:20:58.103469 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 10:20:58 crc kubenswrapper[4685]: I1202 10:20:58.103516 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 10:20:58 crc kubenswrapper[4685]: I1202 10:20:58.104381 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 10:20:58 crc kubenswrapper[4685]: I1202 10:20:58.104449 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 10:20:58 crc kubenswrapper[4685]: I1202 10:20:58.148444 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 10:20:58 crc kubenswrapper[4685]: I1202 10:20:58.156980 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 10:20:58 crc kubenswrapper[4685]: I1202 10:20:58.562920 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:20:58 crc kubenswrapper[4685]: I1202 10:20:58.608691 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6dc8d84c85-x99fw" podStartSLOduration=5.608666486 podStartE2EDuration="5.608666486s" podCreationTimestamp="2025-12-02 10:20:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:20:58.590363873 +0000 UTC m=+1150.962138027" watchObservedRunningTime="2025-12-02 10:20:58.608666486 +0000 UTC m=+1150.980440640" Dec 02 10:20:59 crc kubenswrapper[4685]: I1202 10:20:59.592103 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:21:00 crc kubenswrapper[4685]: I1202 10:21:00.558129 
4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:21:00 crc kubenswrapper[4685]: I1202 10:21:00.696234 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4b9pm"] Dec 02 10:21:00 crc kubenswrapper[4685]: I1202 10:21:00.696535 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" podUID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" containerName="dnsmasq-dns" containerID="cri-o://577999921edcc5b692f156a00b9dc635423c1554902c40cbf73e4f5bacdc17cd" gracePeriod=10 Dec 02 10:21:01 crc kubenswrapper[4685]: I1202 10:21:01.374053 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:21:01 crc kubenswrapper[4685]: I1202 10:21:01.374374 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:21:01 crc kubenswrapper[4685]: I1202 10:21:01.544483 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:21:01 crc kubenswrapper[4685]: I1202 10:21:01.545038 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:21:01 crc kubenswrapper[4685]: I1202 10:21:01.628715 4685 generic.go:334] "Generic (PLEG): container finished" podID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" containerID="577999921edcc5b692f156a00b9dc635423c1554902c40cbf73e4f5bacdc17cd" exitCode=0 Dec 02 10:21:01 crc kubenswrapper[4685]: I1202 10:21:01.628783 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" event={"ID":"ae11b944-e16a-4404-94b7-c28fec4c5cc4","Type":"ContainerDied","Data":"577999921edcc5b692f156a00b9dc635423c1554902c40cbf73e4f5bacdc17cd"} Dec 02 10:21:01 crc kubenswrapper[4685]: I1202 10:21:01.630576 4685 generic.go:334] "Generic (PLEG): container finished" podID="fcc4c853-7816-4912-9c98-55c29ae90396" containerID="7515c3e25a47861ece92cfebe06534215c8bd020b4182bba15c432640bc597e7" exitCode=0 Dec 02 10:21:01 crc kubenswrapper[4685]: I1202 10:21:01.630628 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-9m45k" event={"ID":"fcc4c853-7816-4912-9c98-55c29ae90396","Type":"ContainerDied","Data":"7515c3e25a47861ece92cfebe06534215c8bd020b4182bba15c432640bc597e7"} Dec 02 10:21:01 crc kubenswrapper[4685]: I1202 10:21:01.637637 4685 generic.go:334] "Generic (PLEG): container finished" podID="8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" containerID="ed21cd4795c0f484c817670e9329fabd31dff1365d9260d1fdacf9f928618d6d" exitCode=0 Dec 02 10:21:01 crc kubenswrapper[4685]: I1202 10:21:01.638483 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v9lzr" event={"ID":"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362","Type":"ContainerDied","Data":"ed21cd4795c0f484c817670e9329fabd31dff1365d9260d1fdacf9f928618d6d"} Dec 02 10:21:03 crc kubenswrapper[4685]: I1202 10:21:03.264801 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 10:21:03 crc kubenswrapper[4685]: I1202 10:21:03.265363 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:21:03 crc kubenswrapper[4685]: I1202 10:21:03.272750 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 
02 10:21:03 crc kubenswrapper[4685]: I1202 10:21:03.326315 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 10:21:03 crc kubenswrapper[4685]: I1202 10:21:03.398118 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 10:21:04 crc kubenswrapper[4685]: I1202 10:21:04.670888 4685 generic.go:334] "Generic (PLEG): container finished" podID="83601b22-374e-4d44-85a3-eed233d3ff11" containerID="ee91c84bd6126d401419888b4d3ddd01067c50bb40b385ac1c8c9b640048313b" exitCode=0 Dec 02 10:21:04 crc kubenswrapper[4685]: I1202 10:21:04.670976 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mfhwg" event={"ID":"83601b22-374e-4d44-85a3-eed233d3ff11","Type":"ContainerDied","Data":"ee91c84bd6126d401419888b4d3ddd01067c50bb40b385ac1c8c9b640048313b"} Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.353277 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.473206 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-combined-ca-bundle\") pod \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.473257 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-credential-keys\") pod \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.473292 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-scripts\") pod \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.473382 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-fernet-keys\") pod \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.473399 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcnpn\" (UniqueName: \"kubernetes.io/projected/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-kube-api-access-qcnpn\") pod \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.473429 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-config-data\") pod \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\" (UID: \"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362\") " Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.478798 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-scripts" (OuterVolumeSpecName: "scripts") pod "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" (UID: "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.487759 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-kube-api-access-qcnpn" (OuterVolumeSpecName: "kube-api-access-qcnpn") pod "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" (UID: "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362"). InnerVolumeSpecName "kube-api-access-qcnpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.487885 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" (UID: "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.490198 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" (UID: "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.504978 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" (UID: "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.533777 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-config-data" (OuterVolumeSpecName: "config-data") pod "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" (UID: "8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.576450 4685 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.576486 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcnpn\" (UniqueName: \"kubernetes.io/projected/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-kube-api-access-qcnpn\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.576500 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.576508 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.576518 4685 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.576528 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.682846 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v9lzr" event={"ID":"8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362","Type":"ContainerDied","Data":"39fc204b37bc512d209df273924f31f179e3021fb8c5bd1a3cefe9147e61f1bf"} Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.682898 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39fc204b37bc512d209df273924f31f179e3021fb8c5bd1a3cefe9147e61f1bf" Dec 02 10:21:05 crc kubenswrapper[4685]: I1202 10:21:05.682874 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-v9lzr" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.508630 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6549967485-7j47k"] Dec 02 10:21:06 crc kubenswrapper[4685]: E1202 10:21:06.509736 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" containerName="keystone-bootstrap" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.509839 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" containerName="keystone-bootstrap" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.510149 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" containerName="keystone-bootstrap" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.510949 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.521164 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-t7kkp" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.521264 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.521306 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.521358 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.521390 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.521511 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.542194 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6549967485-7j47k"] Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.695315 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-scripts\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.696175 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-config-data\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.696335 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x76jg\" (UniqueName: \"kubernetes.io/projected/8f4445dd-b293-4beb-af28-3d6dcf902a94-kube-api-access-x76jg\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.696469 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-fernet-keys\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.696638 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-internal-tls-certs\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.696783 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-public-tls-certs\") pod \"keystone-6549967485-7j47k\" (UID: 
\"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.696965 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-combined-ca-bundle\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.697109 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-credential-keys\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.799362 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-combined-ca-bundle\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.799434 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-credential-keys\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.799467 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-scripts\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.799618 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-config-data\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.799644 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x76jg\" (UniqueName: \"kubernetes.io/projected/8f4445dd-b293-4beb-af28-3d6dcf902a94-kube-api-access-x76jg\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.799666 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-fernet-keys\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.799691 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-internal-tls-certs\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " 
pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.799717 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-public-tls-certs\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.809319 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-fernet-keys\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.809351 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-config-data\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.809603 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-scripts\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.810000 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-public-tls-certs\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.813377 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-combined-ca-bundle\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.818186 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-credential-keys\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.818378 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f4445dd-b293-4beb-af28-3d6dcf902a94-internal-tls-certs\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.823680 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x76jg\" (UniqueName: \"kubernetes.io/projected/8f4445dd-b293-4beb-af28-3d6dcf902a94-kube-api-access-x76jg\") pod \"keystone-6549967485-7j47k\" (UID: \"8f4445dd-b293-4beb-af28-3d6dcf902a94\") " pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:06 crc kubenswrapper[4685]: I1202 10:21:06.861826 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:07 crc kubenswrapper[4685]: I1202 10:21:07.938651 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" podUID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: i/o timeout" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.766022 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" event={"ID":"ae11b944-e16a-4404-94b7-c28fec4c5cc4","Type":"ContainerDied","Data":"89fd6f9e3a51d9f67606e7d09f27e02afbfb5b8733b19734d02cc922780c29c1"} Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.766067 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89fd6f9e3a51d9f67606e7d09f27e02afbfb5b8733b19734d02cc922780c29c1" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.794864 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.795104 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-mfhwg" event={"ID":"83601b22-374e-4d44-85a3-eed233d3ff11","Type":"ContainerDied","Data":"3ec4d5126299057b4efa8b75d84c56df65fd254af4fedce02ba42398480187bb"} Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.795135 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ec4d5126299057b4efa8b75d84c56df65fd254af4fedce02ba42398480187bb" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.821061 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.831731 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-9m45k" event={"ID":"fcc4c853-7816-4912-9c98-55c29ae90396","Type":"ContainerDied","Data":"33dfe9111cbf027109d054691824231ea48dbfcea72739ecd1636ef142b2be8a"} Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.831802 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33dfe9111cbf027109d054691824231ea48dbfcea72739ecd1636ef142b2be8a" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.841548 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-9m45k" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844452 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-combined-ca-bundle\") pod \"83601b22-374e-4d44-85a3-eed233d3ff11\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844527 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-sb\") pod \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844576 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-nb\") pod \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844595 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-db-sync-config-data\") pod \"83601b22-374e-4d44-85a3-eed233d3ff11\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844615 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-config\") pod \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844646 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-swift-storage-0\") pod \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844663 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-combined-ca-bundle\") pod \"fcc4c853-7816-4912-9c98-55c29ae90396\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844681 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-scripts\") pod \"fcc4c853-7816-4912-9c98-55c29ae90396\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844699 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-config-data\") pod \"fcc4c853-7816-4912-9c98-55c29ae90396\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844735 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s62bt\" (UniqueName: \"kubernetes.io/projected/fcc4c853-7816-4912-9c98-55c29ae90396-kube-api-access-s62bt\") pod \"fcc4c853-7816-4912-9c98-55c29ae90396\" 
(UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844789 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xzdp\" (UniqueName: \"kubernetes.io/projected/ae11b944-e16a-4404-94b7-c28fec4c5cc4-kube-api-access-4xzdp\") pod \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844814 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcc4c853-7816-4912-9c98-55c29ae90396-logs\") pod \"fcc4c853-7816-4912-9c98-55c29ae90396\" (UID: \"fcc4c853-7816-4912-9c98-55c29ae90396\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844846 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxtmm\" (UniqueName: \"kubernetes.io/projected/83601b22-374e-4d44-85a3-eed233d3ff11-kube-api-access-gxtmm\") pod \"83601b22-374e-4d44-85a3-eed233d3ff11\" (UID: \"83601b22-374e-4d44-85a3-eed233d3ff11\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.844865 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-svc\") pod \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\" (UID: \"ae11b944-e16a-4404-94b7-c28fec4c5cc4\") " Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.853743 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcc4c853-7816-4912-9c98-55c29ae90396-logs" (OuterVolumeSpecName: "logs") pod "fcc4c853-7816-4912-9c98-55c29ae90396" (UID: "fcc4c853-7816-4912-9c98-55c29ae90396"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.872204 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcc4c853-7816-4912-9c98-55c29ae90396-kube-api-access-s62bt" (OuterVolumeSpecName: "kube-api-access-s62bt") pod "fcc4c853-7816-4912-9c98-55c29ae90396" (UID: "fcc4c853-7816-4912-9c98-55c29ae90396"). InnerVolumeSpecName "kube-api-access-s62bt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.872868 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae11b944-e16a-4404-94b7-c28fec4c5cc4-kube-api-access-4xzdp" (OuterVolumeSpecName: "kube-api-access-4xzdp") pod "ae11b944-e16a-4404-94b7-c28fec4c5cc4" (UID: "ae11b944-e16a-4404-94b7-c28fec4c5cc4"). InnerVolumeSpecName "kube-api-access-4xzdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.872951 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-scripts" (OuterVolumeSpecName: "scripts") pod "fcc4c853-7816-4912-9c98-55c29ae90396" (UID: "fcc4c853-7816-4912-9c98-55c29ae90396"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.909726 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83601b22-374e-4d44-85a3-eed233d3ff11-kube-api-access-gxtmm" (OuterVolumeSpecName: "kube-api-access-gxtmm") pod "83601b22-374e-4d44-85a3-eed233d3ff11" (UID: "83601b22-374e-4d44-85a3-eed233d3ff11"). InnerVolumeSpecName "kube-api-access-gxtmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.931381 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "83601b22-374e-4d44-85a3-eed233d3ff11" (UID: "83601b22-374e-4d44-85a3-eed233d3ff11"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.963996 4685 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.964030 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.964040 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s62bt\" (UniqueName: \"kubernetes.io/projected/fcc4c853-7816-4912-9c98-55c29ae90396-kube-api-access-s62bt\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.964052 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xzdp\" (UniqueName: \"kubernetes.io/projected/ae11b944-e16a-4404-94b7-c28fec4c5cc4-kube-api-access-4xzdp\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.964062 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fcc4c853-7816-4912-9c98-55c29ae90396-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.964072 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxtmm\" (UniqueName: \"kubernetes.io/projected/83601b22-374e-4d44-85a3-eed233d3ff11-kube-api-access-gxtmm\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.981735 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fcc4c853-7816-4912-9c98-55c29ae90396" (UID: "fcc4c853-7816-4912-9c98-55c29ae90396"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:08 crc kubenswrapper[4685]: I1202 10:21:08.990720 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ae11b944-e16a-4404-94b7-c28fec4c5cc4" (UID: "ae11b944-e16a-4404-94b7-c28fec4c5cc4"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.042107 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-config-data" (OuterVolumeSpecName: "config-data") pod "fcc4c853-7816-4912-9c98-55c29ae90396" (UID: "fcc4c853-7816-4912-9c98-55c29ae90396"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.070821 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83601b22-374e-4d44-85a3-eed233d3ff11" (UID: "83601b22-374e-4d44-85a3-eed233d3ff11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.070875 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.070904 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcc4c853-7816-4912-9c98-55c29ae90396-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.070913 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.093441 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ae11b944-e16a-4404-94b7-c28fec4c5cc4" (UID: "ae11b944-e16a-4404-94b7-c28fec4c5cc4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.105814 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ae11b944-e16a-4404-94b7-c28fec4c5cc4" (UID: "ae11b944-e16a-4404-94b7-c28fec4c5cc4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.128279 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ae11b944-e16a-4404-94b7-c28fec4c5cc4" (UID: "ae11b944-e16a-4404-94b7-c28fec4c5cc4"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.173542 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.173586 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83601b22-374e-4d44-85a3-eed233d3ff11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.173597 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.173612 4685 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.187460 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-config" (OuterVolumeSpecName: "config") pod "ae11b944-e16a-4404-94b7-c28fec4c5cc4" (UID: "ae11b944-e16a-4404-94b7-c28fec4c5cc4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.223689 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6549967485-7j47k"] Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.277855 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae11b944-e16a-4404-94b7-c28fec4c5cc4-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.859049 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d644bf2c-8162-4066-b3c0-3751842256c9","Type":"ContainerStarted","Data":"a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a"} Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.870520 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-9m45k" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.871394 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6549967485-7j47k" event={"ID":"8f4445dd-b293-4beb-af28-3d6dcf902a94","Type":"ContainerStarted","Data":"684f916864f8eac29c21b4149429c104e7550fd8ce1168e00e2dc200a6c7c307"} Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.871495 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" Dec 02 10:21:09 crc kubenswrapper[4685]: I1202 10:21:09.872626 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-mfhwg" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.105257 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-9f589dd8-wf2cx"] Dec 02 10:21:10 crc kubenswrapper[4685]: E1202 10:21:10.106123 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" containerName="dnsmasq-dns" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.106141 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" containerName="dnsmasq-dns" Dec 02 10:21:10 crc kubenswrapper[4685]: E1202 10:21:10.106167 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcc4c853-7816-4912-9c98-55c29ae90396" containerName="placement-db-sync" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.106173 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcc4c853-7816-4912-9c98-55c29ae90396" containerName="placement-db-sync" Dec 02 10:21:10 crc kubenswrapper[4685]: E1202 10:21:10.106198 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83601b22-374e-4d44-85a3-eed233d3ff11" containerName="barbican-db-sync" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.106204 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="83601b22-374e-4d44-85a3-eed233d3ff11" containerName="barbican-db-sync" Dec 02 10:21:10 crc kubenswrapper[4685]: E1202 10:21:10.106224 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" containerName="init" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.106230 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" containerName="init" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.106578 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcc4c853-7816-4912-9c98-55c29ae90396" containerName="placement-db-sync" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.106596 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="83601b22-374e-4d44-85a3-eed233d3ff11" containerName="barbican-db-sync" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.106615 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" containerName="dnsmasq-dns" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.146726 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.152932 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.153158 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.157584 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-xz5hx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.157825 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.160267 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.227643 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9f589dd8-wf2cx"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.308600 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-scripts\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.309183 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-public-tls-certs\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.309313 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1af68409-9c33-470d-96bd-abf8eb121c9d-logs\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.309432 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7r87\" (UniqueName: \"kubernetes.io/projected/1af68409-9c33-470d-96bd-abf8eb121c9d-kube-api-access-p7r87\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.309650 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-combined-ca-bundle\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.309753 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-config-data\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.309910 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-internal-tls-certs\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.340044 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7b57b745f4-h769r"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.341961 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.355072 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.355141 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-s6w2j" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.355324 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.374639 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7b57b745f4-h769r"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.389404 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-z7f8q"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.390964 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.404839 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6cd49d95f9-sdvx2"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.406618 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.411768 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.412677 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvzpv\" (UniqueName: \"kubernetes.io/projected/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-kube-api-access-bvzpv\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.412769 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48d319ac-7ed6-48dd-b934-91833d81dd44-logs\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.412855 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-internal-tls-certs\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.412911 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.420713 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-scripts\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.420969 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-config\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.421075 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.421178 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-svc\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.421376 4685 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-public-tls-certs\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.421490 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48d319ac-7ed6-48dd-b934-91833d81dd44-config-data-custom\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.421587 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1af68409-9c33-470d-96bd-abf8eb121c9d-logs\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.422429 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7r87\" (UniqueName: \"kubernetes.io/projected/1af68409-9c33-470d-96bd-abf8eb121c9d-kube-api-access-p7r87\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.422570 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.422681 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-254m4\" (UniqueName: \"kubernetes.io/projected/48d319ac-7ed6-48dd-b934-91833d81dd44-kube-api-access-254m4\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.422942 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48d319ac-7ed6-48dd-b934-91833d81dd44-combined-ca-bundle\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.423085 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-combined-ca-bundle\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.423264 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48d319ac-7ed6-48dd-b934-91833d81dd44-config-data\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " 
pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.423378 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-config-data\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.431263 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1af68409-9c33-470d-96bd-abf8eb121c9d-logs\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.421945 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-z7f8q"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.437096 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-scripts\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.441000 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-internal-tls-certs\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.441087 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4b9pm"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.449121 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-config-data\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.449141 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4b9pm"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.449537 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-public-tls-certs\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.455062 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1af68409-9c33-470d-96bd-abf8eb121c9d-combined-ca-bundle\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.459637 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6cd49d95f9-sdvx2"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.514898 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-65bd55bccb-s55r4"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.516367 4685 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.522048 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.522046 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7r87\" (UniqueName: \"kubernetes.io/projected/1af68409-9c33-470d-96bd-abf8eb121c9d-kube-api-access-p7r87\") pod \"placement-9f589dd8-wf2cx\" (UID: \"1af68409-9c33-470d-96bd-abf8eb121c9d\") " pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.527923 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-config\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528007 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-combined-ca-bundle\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528037 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528064 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-svc\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528100 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-config-data\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528135 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-logs\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528177 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48d319ac-7ed6-48dd-b934-91833d81dd44-config-data-custom\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528213 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-config-data-custom\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528433 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4mkm\" (UniqueName: \"kubernetes.io/projected/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-kube-api-access-p4mkm\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528477 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528511 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-254m4\" (UniqueName: \"kubernetes.io/projected/48d319ac-7ed6-48dd-b934-91833d81dd44-kube-api-access-254m4\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528575 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48d319ac-7ed6-48dd-b934-91833d81dd44-combined-ca-bundle\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528613 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48d319ac-7ed6-48dd-b934-91833d81dd44-config-data\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528651 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528688 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvzpv\" (UniqueName: \"kubernetes.io/projected/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-kube-api-access-bvzpv\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.528739 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48d319ac-7ed6-48dd-b934-91833d81dd44-logs\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " 
pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.532292 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48d319ac-7ed6-48dd-b934-91833d81dd44-logs\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.533788 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-config\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.535469 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.535905 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-svc\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.539444 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-65bd55bccb-s55r4"] Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.539718 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.540248 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48d319ac-7ed6-48dd-b934-91833d81dd44-combined-ca-bundle\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.540405 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.540932 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48d319ac-7ed6-48dd-b934-91833d81dd44-config-data-custom\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.559339 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/48d319ac-7ed6-48dd-b934-91833d81dd44-config-data\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.572265 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvzpv\" (UniqueName: \"kubernetes.io/projected/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-kube-api-access-bvzpv\") pod \"dnsmasq-dns-85ff748b95-z7f8q\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.587218 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-254m4\" (UniqueName: \"kubernetes.io/projected/48d319ac-7ed6-48dd-b934-91833d81dd44-kube-api-access-254m4\") pod \"barbican-keystone-listener-7b57b745f4-h769r\" (UID: \"48d319ac-7ed6-48dd-b934-91833d81dd44\") " pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.632979 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.633117 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fngzw\" (UniqueName: \"kubernetes.io/projected/f64b77d2-5c73-478c-b793-20e5982cdea6-kube-api-access-fngzw\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.633371 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data-custom\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.633406 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64b77d2-5c73-478c-b793-20e5982cdea6-logs\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.633457 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.633481 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-combined-ca-bundle\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.633508 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-config-data\") pod 
\"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.633537 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-logs\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.633593 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-config-data-custom\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.633611 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4mkm\" (UniqueName: \"kubernetes.io/projected/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-kube-api-access-p4mkm\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.633642 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-combined-ca-bundle\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.636397 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-logs\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.645102 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-config-data-custom\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.645318 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-combined-ca-bundle\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.669869 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4mkm\" (UniqueName: \"kubernetes.io/projected/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-kube-api-access-p4mkm\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.692372 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.716064 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2daca7c-e2f6-4e26-a1d8-259d2123c2d6-config-data\") pod \"barbican-worker-6cd49d95f9-sdvx2\" (UID: \"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6\") " pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.736682 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-combined-ca-bundle\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.736741 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fngzw\" (UniqueName: \"kubernetes.io/projected/f64b77d2-5c73-478c-b793-20e5982cdea6-kube-api-access-fngzw\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.737009 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data-custom\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.737094 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64b77d2-5c73-478c-b793-20e5982cdea6-logs\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.737173 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.741357 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64b77d2-5c73-478c-b793-20e5982cdea6-logs\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.741811 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.750680 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data-custom\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.757177 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.760178 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fngzw\" (UniqueName: \"kubernetes.io/projected/f64b77d2-5c73-478c-b793-20e5982cdea6-kube-api-access-fngzw\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.776449 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-combined-ca-bundle\") pod \"barbican-api-65bd55bccb-s55r4\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.842055 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6cd49d95f9-sdvx2" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.853838 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.903179 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6549967485-7j47k" event={"ID":"8f4445dd-b293-4beb-af28-3d6dcf902a94","Type":"ContainerStarted","Data":"2fc04f24f410e4001976d68ff24353c1b352bfa2c2e72f112916c5a41f95629a"} Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.903967 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.910083 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xx8rz" event={"ID":"5b558b4d-8398-435d-8925-e36325681252","Type":"ContainerStarted","Data":"250cda1f46a6099f638e7b8f0a8261f8d473b2e75c320a2969a4426fd7a695cf"} Dec 02 10:21:10 crc kubenswrapper[4685]: I1202 10:21:10.951822 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6549967485-7j47k" podStartSLOduration=4.951802027 podStartE2EDuration="4.951802027s" podCreationTimestamp="2025-12-02 10:21:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:21:10.943248232 +0000 UTC m=+1163.315022386" watchObservedRunningTime="2025-12-02 10:21:10.951802027 +0000 UTC m=+1163.323576181" Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.037042 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-xx8rz" podStartSLOduration=4.905843357 podStartE2EDuration="1m0.037017167s" podCreationTimestamp="2025-12-02 10:20:11 +0000 UTC" firstStartedPulling="2025-12-02 10:20:13.481645017 +0000 UTC m=+1105.853419171" lastFinishedPulling="2025-12-02 10:21:08.612818827 +0000 UTC m=+1160.984592981" observedRunningTime="2025-12-02 10:21:10.982116549 +0000 UTC m=+1163.353890703" watchObservedRunningTime="2025-12-02 10:21:11.037017167 +0000 UTC m=+1163.408791321" Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.381778 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5cb9cd6bb4-c4gq9" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.413766 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9f589dd8-wf2cx"] Dec 02 10:21:11 crc kubenswrapper[4685]: W1202 10:21:11.438921 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1af68409_9c33_470d_96bd_abf8eb121c9d.slice/crio-319b9914c9b9a1d7a598ca3a7f6e5bf4d7789c4abd7d73f755d47bd268e065a7 WatchSource:0}: Error finding container 319b9914c9b9a1d7a598ca3a7f6e5bf4d7789c4abd7d73f755d47bd268e065a7: Status 404 returned error can't find the container with id 319b9914c9b9a1d7a598ca3a7f6e5bf4d7789c4abd7d73f755d47bd268e065a7 Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.556935 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6cd49d95f9-sdvx2"] Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.594932 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-79bf856964-lh2w8" podUID="3d251819-b2e8-4cc5-b56c-977ea549bf2f" containerName="horizon" 
probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.599052 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7b57b745f4-h769r"] Dec 02 10:21:11 crc kubenswrapper[4685]: W1202 10:21:11.641125 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2daca7c_e2f6_4e26_a1d8_259d2123c2d6.slice/crio-668975cbd9c10e93d3216b4d022fad8d9e84f7234cbfa408332dda0b034baccd WatchSource:0}: Error finding container 668975cbd9c10e93d3216b4d022fad8d9e84f7234cbfa408332dda0b034baccd: Status 404 returned error can't find the container with id 668975cbd9c10e93d3216b4d022fad8d9e84f7234cbfa408332dda0b034baccd Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.734610 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-z7f8q"] Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.746890 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-65bd55bccb-s55r4"] Dec 02 10:21:11 crc kubenswrapper[4685]: W1202 10:21:11.779284 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf64b77d2_5c73_478c_b793_20e5982cdea6.slice/crio-81b820407420bdc27aee5c77e47fc7dea5150b861b58778d95a5844aa7af4733 WatchSource:0}: Error finding container 81b820407420bdc27aee5c77e47fc7dea5150b861b58778d95a5844aa7af4733: Status 404 returned error can't find the container with id 81b820407420bdc27aee5c77e47fc7dea5150b861b58778d95a5844aa7af4733 Dec 02 10:21:11 crc kubenswrapper[4685]: W1202 10:21:11.781084 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e8726be_5d67_4d87_8bf0_fc7a95f575e1.slice/crio-0f2fdf937b0045efd4ab32a7122fd09fc59111f69fcf0bb3dde6cd7b12ae52b2 WatchSource:0}: Error finding container 0f2fdf937b0045efd4ab32a7122fd09fc59111f69fcf0bb3dde6cd7b12ae52b2: Status 404 returned error can't find the container with id 0f2fdf937b0045efd4ab32a7122fd09fc59111f69fcf0bb3dde6cd7b12ae52b2 Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.965449 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" path="/var/lib/kubelet/pods/ae11b944-e16a-4404-94b7-c28fec4c5cc4/volumes" Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.973789 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6cd49d95f9-sdvx2" event={"ID":"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6","Type":"ContainerStarted","Data":"668975cbd9c10e93d3216b4d022fad8d9e84f7234cbfa408332dda0b034baccd"} Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.976789 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" event={"ID":"48d319ac-7ed6-48dd-b934-91833d81dd44","Type":"ContainerStarted","Data":"b5d669a7e2d4dd1064673b8c89b1d68ac0b1929a080887a31147f1ea763f7817"} Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 10:21:11.988692 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65bd55bccb-s55r4" event={"ID":"f64b77d2-5c73-478c-b793-20e5982cdea6","Type":"ContainerStarted","Data":"81b820407420bdc27aee5c77e47fc7dea5150b861b58778d95a5844aa7af4733"} Dec 02 10:21:11 crc kubenswrapper[4685]: I1202 
10:21:11.998167 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9f589dd8-wf2cx" event={"ID":"1af68409-9c33-470d-96bd-abf8eb121c9d","Type":"ContainerStarted","Data":"319b9914c9b9a1d7a598ca3a7f6e5bf4d7789c4abd7d73f755d47bd268e065a7"} Dec 02 10:21:12 crc kubenswrapper[4685]: I1202 10:21:12.005763 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" event={"ID":"6e8726be-5d67-4d87-8bf0-fc7a95f575e1","Type":"ContainerStarted","Data":"0f2fdf937b0045efd4ab32a7122fd09fc59111f69fcf0bb3dde6cd7b12ae52b2"} Dec 02 10:21:12 crc kubenswrapper[4685]: I1202 10:21:12.939852 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-785d8bcb8c-4b9pm" podUID="ae11b944-e16a-4404-94b7-c28fec4c5cc4" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: i/o timeout" Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.021455 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9f589dd8-wf2cx" event={"ID":"1af68409-9c33-470d-96bd-abf8eb121c9d","Type":"ContainerStarted","Data":"62af2e41c909ea85ecb2fb6fe2b371d281d6850f1affbfef701f34e718371b0b"} Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.021499 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9f589dd8-wf2cx" event={"ID":"1af68409-9c33-470d-96bd-abf8eb121c9d","Type":"ContainerStarted","Data":"690a30f52d724251f36009cc84302fdfcfa05129710af27dca02b776d2d1f310"} Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.022370 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.022401 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.027479 4685 generic.go:334] "Generic (PLEG): container finished" podID="6e8726be-5d67-4d87-8bf0-fc7a95f575e1" containerID="ee4bcd9e421a286c71ecbde735936f2187b305fb2114157c0a7597d3d959907a" exitCode=0 Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.027536 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" event={"ID":"6e8726be-5d67-4d87-8bf0-fc7a95f575e1","Type":"ContainerDied","Data":"ee4bcd9e421a286c71ecbde735936f2187b305fb2114157c0a7597d3d959907a"} Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.045171 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65bd55bccb-s55r4" event={"ID":"f64b77d2-5c73-478c-b793-20e5982cdea6","Type":"ContainerStarted","Data":"d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8"} Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.045220 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.045233 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65bd55bccb-s55r4" event={"ID":"f64b77d2-5c73-478c-b793-20e5982cdea6","Type":"ContainerStarted","Data":"4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698"} Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.045262 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.091949 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/placement-9f589dd8-wf2cx" podStartSLOduration=3.091926186 podStartE2EDuration="3.091926186s" podCreationTimestamp="2025-12-02 10:21:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:21:13.051237558 +0000 UTC m=+1165.423011722" watchObservedRunningTime="2025-12-02 10:21:13.091926186 +0000 UTC m=+1165.463700340" Dec 02 10:21:13 crc kubenswrapper[4685]: I1202 10:21:13.102835 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-65bd55bccb-s55r4" podStartSLOduration=3.102812934 podStartE2EDuration="3.102812934s" podCreationTimestamp="2025-12-02 10:21:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:21:13.088659826 +0000 UTC m=+1165.460434000" watchObservedRunningTime="2025-12-02 10:21:13.102812934 +0000 UTC m=+1165.474587098" Dec 02 10:21:14 crc kubenswrapper[4685]: I1202 10:21:14.064661 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" event={"ID":"6e8726be-5d67-4d87-8bf0-fc7a95f575e1","Type":"ContainerStarted","Data":"82e6f22d52fea00fbb032cf900f31905912c2587db2158a6933c6c34fa3474ad"} Dec 02 10:21:14 crc kubenswrapper[4685]: I1202 10:21:14.065910 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:14 crc kubenswrapper[4685]: I1202 10:21:14.093016 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" podStartSLOduration=4.092990776 podStartE2EDuration="4.092990776s" podCreationTimestamp="2025-12-02 10:21:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:21:14.09024904 +0000 UTC m=+1166.462023194" watchObservedRunningTime="2025-12-02 10:21:14.092990776 +0000 UTC m=+1166.464764930" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.205845 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-56bd4844cd-s5f2m"] Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.211501 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.212849 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-56bd4844cd-s5f2m"] Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.225507 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.225705 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.270261 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-internal-tls-certs\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.270345 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9nwx\" (UniqueName: \"kubernetes.io/projected/746491bf-6d00-4370-a7ba-740687bd6faa-kube-api-access-l9nwx\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.270508 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-public-tls-certs\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.270658 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/746491bf-6d00-4370-a7ba-740687bd6faa-logs\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.270779 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-config-data\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.270909 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-config-data-custom\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.270938 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-combined-ca-bundle\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.372683 4685 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-internal-tls-certs\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.373181 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9nwx\" (UniqueName: \"kubernetes.io/projected/746491bf-6d00-4370-a7ba-740687bd6faa-kube-api-access-l9nwx\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.373242 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-public-tls-certs\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.373326 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/746491bf-6d00-4370-a7ba-740687bd6faa-logs\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.373379 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-config-data\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.373459 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-config-data-custom\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.373502 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-combined-ca-bundle\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.375682 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/746491bf-6d00-4370-a7ba-740687bd6faa-logs\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.384379 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-public-tls-certs\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.384741 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-internal-tls-certs\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.406233 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-config-data\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.406719 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-combined-ca-bundle\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.413426 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/746491bf-6d00-4370-a7ba-740687bd6faa-config-data-custom\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.418408 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9nwx\" (UniqueName: \"kubernetes.io/projected/746491bf-6d00-4370-a7ba-740687bd6faa-kube-api-access-l9nwx\") pod \"barbican-api-56bd4844cd-s5f2m\" (UID: \"746491bf-6d00-4370-a7ba-740687bd6faa\") " pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:15 crc kubenswrapper[4685]: I1202 10:21:15.545892 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:16 crc kubenswrapper[4685]: I1202 10:21:16.111977 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" event={"ID":"48d319ac-7ed6-48dd-b934-91833d81dd44","Type":"ContainerStarted","Data":"2d6bfa838e31237f3b26100b3c0a795156a5b1e63e29459465a8ac0fd25e0065"} Dec 02 10:21:16 crc kubenswrapper[4685]: I1202 10:21:16.112271 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" event={"ID":"48d319ac-7ed6-48dd-b934-91833d81dd44","Type":"ContainerStarted","Data":"0e9f5eea23bbbbabd00bf412b5798684df85425d44a6b7338001472db2622624"} Dec 02 10:21:16 crc kubenswrapper[4685]: I1202 10:21:16.114852 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6cd49d95f9-sdvx2" event={"ID":"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6","Type":"ContainerStarted","Data":"363ba6d00563f54ba02ef73d7c57858d91aa1a9377558d2d3756e3229eb667d2"} Dec 02 10:21:16 crc kubenswrapper[4685]: I1202 10:21:16.114897 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6cd49d95f9-sdvx2" event={"ID":"f2daca7c-e2f6-4e26-a1d8-259d2123c2d6","Type":"ContainerStarted","Data":"85d00c7c2090c00097e70fdfd89f758ed9d6af3e2f6a4b3160c36490645119fb"} Dec 02 10:21:16 crc kubenswrapper[4685]: I1202 10:21:16.140147 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7b57b745f4-h769r" podStartSLOduration=2.850467975 podStartE2EDuration="6.140121741s" podCreationTimestamp="2025-12-02 10:21:10 +0000 UTC" firstStartedPulling="2025-12-02 10:21:11.645446974 +0000 UTC m=+1164.017221128" lastFinishedPulling="2025-12-02 10:21:14.93510074 +0000 UTC m=+1167.306874894" observedRunningTime="2025-12-02 10:21:16.134546558 +0000 UTC m=+1168.506320712" watchObservedRunningTime="2025-12-02 10:21:16.140121741 +0000 UTC m=+1168.511895895" Dec 02 10:21:16 crc kubenswrapper[4685]: I1202 10:21:16.168362 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6cd49d95f9-sdvx2" podStartSLOduration=2.878876146 podStartE2EDuration="6.168339406s" podCreationTimestamp="2025-12-02 10:21:10 +0000 UTC" firstStartedPulling="2025-12-02 10:21:11.662115622 +0000 UTC m=+1164.033889776" lastFinishedPulling="2025-12-02 10:21:14.951578792 +0000 UTC m=+1167.323353036" observedRunningTime="2025-12-02 10:21:16.161862508 +0000 UTC m=+1168.533636662" watchObservedRunningTime="2025-12-02 10:21:16.168339406 +0000 UTC m=+1168.540113560" Dec 02 10:21:16 crc kubenswrapper[4685]: I1202 10:21:16.232209 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-56bd4844cd-s5f2m"] Dec 02 10:21:17 crc kubenswrapper[4685]: I1202 10:21:17.130463 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56bd4844cd-s5f2m" event={"ID":"746491bf-6d00-4370-a7ba-740687bd6faa","Type":"ContainerStarted","Data":"2ec64cdb5d77096a547dd1dc4a9d77ae97a9dec231bfc825035bddade2ed3e66"} Dec 02 10:21:17 crc kubenswrapper[4685]: I1202 10:21:17.130840 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56bd4844cd-s5f2m" event={"ID":"746491bf-6d00-4370-a7ba-740687bd6faa","Type":"ContainerStarted","Data":"84475e24f9aec8c37e016382e415d2eabb634ccb5cb51f1cf223c0a80f557f1a"} Dec 02 10:21:20 crc kubenswrapper[4685]: I1202 10:21:20.695579 4685 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:21:20 crc kubenswrapper[4685]: I1202 10:21:20.743991 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:20 crc kubenswrapper[4685]: I1202 10:21:20.905023 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-lcv4t"] Dec 02 10:21:20 crc kubenswrapper[4685]: I1202 10:21:20.915093 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" podUID="ff77bc38-e9d1-400d-92cf-eb2154f26fa3" containerName="dnsmasq-dns" containerID="cri-o://89b762077eee8992ff6f87fd1076e1cb5071386f567adede8898e8113a1fe933" gracePeriod=10 Dec 02 10:21:21 crc kubenswrapper[4685]: I1202 10:21:21.178855 4685 generic.go:334] "Generic (PLEG): container finished" podID="ff77bc38-e9d1-400d-92cf-eb2154f26fa3" containerID="89b762077eee8992ff6f87fd1076e1cb5071386f567adede8898e8113a1fe933" exitCode=0 Dec 02 10:21:21 crc kubenswrapper[4685]: I1202 10:21:21.178931 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" event={"ID":"ff77bc38-e9d1-400d-92cf-eb2154f26fa3","Type":"ContainerDied","Data":"89b762077eee8992ff6f87fd1076e1cb5071386f567adede8898e8113a1fe933"} Dec 02 10:21:21 crc kubenswrapper[4685]: I1202 10:21:21.186357 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-56bd4844cd-s5f2m" event={"ID":"746491bf-6d00-4370-a7ba-740687bd6faa","Type":"ContainerStarted","Data":"1cde45c2fb2c58fa0736cb8364e2c037a4d3b79d5f14cb7e1d1c6e933242d213"} Dec 02 10:21:21 crc kubenswrapper[4685]: I1202 10:21:21.186871 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:21 crc kubenswrapper[4685]: I1202 10:21:21.187147 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:21 crc kubenswrapper[4685]: I1202 10:21:21.198677 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-56bd4844cd-s5f2m" podUID="746491bf-6d00-4370-a7ba-740687bd6faa" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.158:9311/healthcheck\": dial tcp 10.217.0.158:9311: connect: connection refused" Dec 02 10:21:21 crc kubenswrapper[4685]: I1202 10:21:21.375084 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5cb9cd6bb4-c4gq9" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 02 10:21:21 crc kubenswrapper[4685]: I1202 10:21:21.544642 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-79bf856964-lh2w8" podUID="3d251819-b2e8-4cc5-b56c-977ea549bf2f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 02 10:21:23 crc kubenswrapper[4685]: I1202 10:21:23.201928 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:23 crc kubenswrapper[4685]: I1202 10:21:23.224150 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-56bd4844cd-s5f2m" 
podStartSLOduration=8.224132111 podStartE2EDuration="8.224132111s" podCreationTimestamp="2025-12-02 10:21:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:21:21.227275406 +0000 UTC m=+1173.599049560" watchObservedRunningTime="2025-12-02 10:21:23.224132111 +0000 UTC m=+1175.595906255" Dec 02 10:21:23 crc kubenswrapper[4685]: I1202 10:21:23.590111 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:23 crc kubenswrapper[4685]: I1202 10:21:23.691954 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6dc8d84c85-x99fw" Dec 02 10:21:23 crc kubenswrapper[4685]: I1202 10:21:23.767010 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6d7f7b7954-l6dql"] Dec 02 10:21:23 crc kubenswrapper[4685]: I1202 10:21:23.767283 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6d7f7b7954-l6dql" podUID="129679f9-1a88-4185-aafd-512749132f28" containerName="neutron-api" containerID="cri-o://ca4cbbbc5e5146fcac5adb5f35ce091207ae56bd84e1efe4b8597de362c39d86" gracePeriod=30 Dec 02 10:21:23 crc kubenswrapper[4685]: I1202 10:21:23.767881 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6d7f7b7954-l6dql" podUID="129679f9-1a88-4185-aafd-512749132f28" containerName="neutron-httpd" containerID="cri-o://52fcf384c1da3a272b227f8dd9b367730d65904701f4c6fb9e2172653caccde2" gracePeriod=30 Dec 02 10:21:24 crc kubenswrapper[4685]: I1202 10:21:24.231486 4685 generic.go:334] "Generic (PLEG): container finished" podID="5b558b4d-8398-435d-8925-e36325681252" containerID="250cda1f46a6099f638e7b8f0a8261f8d473b2e75c320a2969a4426fd7a695cf" exitCode=0 Dec 02 10:21:24 crc kubenswrapper[4685]: I1202 10:21:24.232337 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xx8rz" event={"ID":"5b558b4d-8398-435d-8925-e36325681252","Type":"ContainerDied","Data":"250cda1f46a6099f638e7b8f0a8261f8d473b2e75c320a2969a4426fd7a695cf"} Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.287459 4685 generic.go:334] "Generic (PLEG): container finished" podID="129679f9-1a88-4185-aafd-512749132f28" containerID="52fcf384c1da3a272b227f8dd9b367730d65904701f4c6fb9e2172653caccde2" exitCode=0 Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.288021 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7f7b7954-l6dql" event={"ID":"129679f9-1a88-4185-aafd-512749132f28","Type":"ContainerDied","Data":"52fcf384c1da3a272b227f8dd9b367730d65904701f4c6fb9e2172653caccde2"} Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.289659 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.298492 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" event={"ID":"ff77bc38-e9d1-400d-92cf-eb2154f26fa3","Type":"ContainerDied","Data":"c1fef34587cf1ae16b3962cc270d7da7f064158ddec071d6edbf5f6a1f693377"} Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.298540 4685 scope.go:117] "RemoveContainer" containerID="89b762077eee8992ff6f87fd1076e1cb5071386f567adede8898e8113a1fe933" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.385782 4685 scope.go:117] "RemoveContainer" containerID="eead65235fb4d2aa5aae35b3e65ac543c0d79a20e1a1e64c32e762f1f3f6b479" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.416964 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-swift-storage-0\") pod \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.417055 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-sb\") pod \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.417132 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhmdh\" (UniqueName: \"kubernetes.io/projected/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-kube-api-access-vhmdh\") pod \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.417655 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-config\") pod \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.417684 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-svc\") pod \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.417736 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-nb\") pod \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\" (UID: \"ff77bc38-e9d1-400d-92cf-eb2154f26fa3\") " Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.435878 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-kube-api-access-vhmdh" (OuterVolumeSpecName: "kube-api-access-vhmdh") pod "ff77bc38-e9d1-400d-92cf-eb2154f26fa3" (UID: "ff77bc38-e9d1-400d-92cf-eb2154f26fa3"). InnerVolumeSpecName "kube-api-access-vhmdh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.521636 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhmdh\" (UniqueName: \"kubernetes.io/projected/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-kube-api-access-vhmdh\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.820165 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ff77bc38-e9d1-400d-92cf-eb2154f26fa3" (UID: "ff77bc38-e9d1-400d-92cf-eb2154f26fa3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.826095 4685 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.827679 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-config" (OuterVolumeSpecName: "config") pod "ff77bc38-e9d1-400d-92cf-eb2154f26fa3" (UID: "ff77bc38-e9d1-400d-92cf-eb2154f26fa3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.874155 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ff77bc38-e9d1-400d-92cf-eb2154f26fa3" (UID: "ff77bc38-e9d1-400d-92cf-eb2154f26fa3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.881704 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ff77bc38-e9d1-400d-92cf-eb2154f26fa3" (UID: "ff77bc38-e9d1-400d-92cf-eb2154f26fa3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.882017 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ff77bc38-e9d1-400d-92cf-eb2154f26fa3" (UID: "ff77bc38-e9d1-400d-92cf-eb2154f26fa3"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.884959 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.927488 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.927516 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.927524 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.927533 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff77bc38-e9d1-400d-92cf-eb2154f26fa3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:25 crc kubenswrapper[4685]: I1202 10:21:25.964128 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:21:26 crc kubenswrapper[4685]: E1202 10:21:26.101685 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.130459 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-config-data\") pod \"5b558b4d-8398-435d-8925-e36325681252\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.130515 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-scripts\") pod \"5b558b4d-8398-435d-8925-e36325681252\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.130548 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwrmm\" (UniqueName: \"kubernetes.io/projected/5b558b4d-8398-435d-8925-e36325681252-kube-api-access-bwrmm\") pod \"5b558b4d-8398-435d-8925-e36325681252\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.130592 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-db-sync-config-data\") pod \"5b558b4d-8398-435d-8925-e36325681252\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.130730 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-combined-ca-bundle\") pod \"5b558b4d-8398-435d-8925-e36325681252\" (UID: 
\"5b558b4d-8398-435d-8925-e36325681252\") " Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.130764 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5b558b4d-8398-435d-8925-e36325681252-etc-machine-id\") pod \"5b558b4d-8398-435d-8925-e36325681252\" (UID: \"5b558b4d-8398-435d-8925-e36325681252\") " Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.131150 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5b558b4d-8398-435d-8925-e36325681252-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "5b558b4d-8398-435d-8925-e36325681252" (UID: "5b558b4d-8398-435d-8925-e36325681252"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.155550 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-scripts" (OuterVolumeSpecName: "scripts") pod "5b558b4d-8398-435d-8925-e36325681252" (UID: "5b558b4d-8398-435d-8925-e36325681252"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.156928 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b558b4d-8398-435d-8925-e36325681252-kube-api-access-bwrmm" (OuterVolumeSpecName: "kube-api-access-bwrmm") pod "5b558b4d-8398-435d-8925-e36325681252" (UID: "5b558b4d-8398-435d-8925-e36325681252"). InnerVolumeSpecName "kube-api-access-bwrmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.162669 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5b558b4d-8398-435d-8925-e36325681252" (UID: "5b558b4d-8398-435d-8925-e36325681252"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.166654 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b558b4d-8398-435d-8925-e36325681252" (UID: "5b558b4d-8398-435d-8925-e36325681252"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.231769 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-config-data" (OuterVolumeSpecName: "config-data") pod "5b558b4d-8398-435d-8925-e36325681252" (UID: "5b558b4d-8398-435d-8925-e36325681252"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.232169 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwrmm\" (UniqueName: \"kubernetes.io/projected/5b558b4d-8398-435d-8925-e36325681252-kube-api-access-bwrmm\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.232201 4685 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.232211 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.232220 4685 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5b558b4d-8398-435d-8925-e36325681252-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.232231 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.232239 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b558b4d-8398-435d-8925-e36325681252-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.309406 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d644bf2c-8162-4066-b3c0-3751842256c9","Type":"ContainerStarted","Data":"622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d"} Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.309522 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="ceilometer-notification-agent" containerID="cri-o://b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc" gracePeriod=30 Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.309584 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.309628 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="proxy-httpd" containerID="cri-o://622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d" gracePeriod=30 Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.309678 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="sg-core" containerID="cri-o://a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a" gracePeriod=30 Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.311790 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-lcv4t" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.315963 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-xx8rz" event={"ID":"5b558b4d-8398-435d-8925-e36325681252","Type":"ContainerDied","Data":"d885b5d5862014ec23199b58153df4d2b4d21134418dbf324ce405ed74df2ba1"} Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.315998 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d885b5d5862014ec23199b58153df4d2b4d21134418dbf324ce405ed74df2ba1" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.316053 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-xx8rz" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.378579 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-lcv4t"] Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.385950 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-lcv4t"] Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.824356 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 10:21:26 crc kubenswrapper[4685]: E1202 10:21:26.824761 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff77bc38-e9d1-400d-92cf-eb2154f26fa3" containerName="dnsmasq-dns" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.824775 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff77bc38-e9d1-400d-92cf-eb2154f26fa3" containerName="dnsmasq-dns" Dec 02 10:21:26 crc kubenswrapper[4685]: E1202 10:21:26.824787 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b558b4d-8398-435d-8925-e36325681252" containerName="cinder-db-sync" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.824793 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b558b4d-8398-435d-8925-e36325681252" containerName="cinder-db-sync" Dec 02 10:21:26 crc kubenswrapper[4685]: E1202 10:21:26.824812 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff77bc38-e9d1-400d-92cf-eb2154f26fa3" containerName="init" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.824818 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff77bc38-e9d1-400d-92cf-eb2154f26fa3" containerName="init" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.824998 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff77bc38-e9d1-400d-92cf-eb2154f26fa3" containerName="dnsmasq-dns" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.825016 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b558b4d-8398-435d-8925-e36325681252" containerName="cinder-db-sync" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.846083 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.856855 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.857112 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zsjdb" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.857850 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.857960 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.872470 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.969717 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.969809 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.969854 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-scripts\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.969897 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9a014994-2f2b-4386-8d2f-00e3657867a7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.969970 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp9dl\" (UniqueName: \"kubernetes.io/projected/9a014994-2f2b-4386-8d2f-00e3657867a7-kube-api-access-hp9dl\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:26 crc kubenswrapper[4685]: I1202 10:21:26.970006 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.075455 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " 
pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.077271 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.077868 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.078182 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-scripts\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.078331 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9a014994-2f2b-4386-8d2f-00e3657867a7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.078571 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp9dl\" (UniqueName: \"kubernetes.io/projected/9a014994-2f2b-4386-8d2f-00e3657867a7-kube-api-access-hp9dl\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.080709 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9a014994-2f2b-4386-8d2f-00e3657867a7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.085135 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-scripts\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.088964 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.095826 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.099702 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-combined-ca-bundle\") pod 
\"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.148917 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp9dl\" (UniqueName: \"kubernetes.io/projected/9a014994-2f2b-4386-8d2f-00e3657867a7-kube-api-access-hp9dl\") pod \"cinder-scheduler-0\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.223972 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.378583 4685 generic.go:334] "Generic (PLEG): container finished" podID="d644bf2c-8162-4066-b3c0-3751842256c9" containerID="a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a" exitCode=2 Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.378625 4685 generic.go:334] "Generic (PLEG): container finished" podID="d644bf2c-8162-4066-b3c0-3751842256c9" containerID="b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc" exitCode=0 Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.378666 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d644bf2c-8162-4066-b3c0-3751842256c9","Type":"ContainerDied","Data":"a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a"} Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.378696 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d644bf2c-8162-4066-b3c0-3751842256c9","Type":"ContainerDied","Data":"b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc"} Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.478327 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-q9z92"] Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.487711 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.554151 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-q9z92"] Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.591045 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4s5d\" (UniqueName: \"kubernetes.io/projected/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-kube-api-access-v4s5d\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.591093 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.591131 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.591180 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.591222 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.591264 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-config\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.609623 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.611168 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.615161 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.637237 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.697537 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4s5d\" (UniqueName: \"kubernetes.io/projected/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-kube-api-access-v4s5d\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.697900 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.697928 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.697963 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.697996 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.698039 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-config\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.699026 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-config\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.699580 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.700061 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.700529 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.710287 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.727595 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4s5d\" (UniqueName: \"kubernetes.io/projected/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-kube-api-access-v4s5d\") pod \"dnsmasq-dns-5c9776ccc5-q9z92\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.799904 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c676h\" (UniqueName: \"kubernetes.io/projected/9c2bfabf-1b91-414b-883d-6a748753d3a5-kube-api-access-c676h\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.800259 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.800302 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c2bfabf-1b91-414b-883d-6a748753d3a5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.800346 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.800379 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-scripts\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.800405 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/9c2bfabf-1b91-414b-883d-6a748753d3a5-logs\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.800441 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data-custom\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.879804 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.885290 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.907699 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c676h\" (UniqueName: \"kubernetes.io/projected/9c2bfabf-1b91-414b-883d-6a748753d3a5-kube-api-access-c676h\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.907769 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.907815 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c2bfabf-1b91-414b-883d-6a748753d3a5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.907851 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.907892 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-scripts\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.907916 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c2bfabf-1b91-414b-883d-6a748753d3a5-logs\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.907943 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data-custom\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.908044 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/9c2bfabf-1b91-414b-883d-6a748753d3a5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.908493 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c2bfabf-1b91-414b-883d-6a748753d3a5-logs\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.942494 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data-custom\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.955091 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-scripts\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.955780 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.960255 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c676h\" (UniqueName: \"kubernetes.io/projected/9c2bfabf-1b91-414b-883d-6a748753d3a5-kube-api-access-c676h\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.964184 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " pod="openstack/cinder-api-0" Dec 02 10:21:27 crc kubenswrapper[4685]: I1202 10:21:27.966340 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff77bc38-e9d1-400d-92cf-eb2154f26fa3" path="/var/lib/kubelet/pods/ff77bc38-e9d1-400d-92cf-eb2154f26fa3/volumes" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.039623 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.230267 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.321351 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-combined-ca-bundle\") pod \"d644bf2c-8162-4066-b3c0-3751842256c9\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.321439 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-sg-core-conf-yaml\") pod \"d644bf2c-8162-4066-b3c0-3751842256c9\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.321547 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-config-data\") pod \"d644bf2c-8162-4066-b3c0-3751842256c9\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.321622 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-run-httpd\") pod \"d644bf2c-8162-4066-b3c0-3751842256c9\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.321678 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-scripts\") pod \"d644bf2c-8162-4066-b3c0-3751842256c9\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.321695 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-log-httpd\") pod \"d644bf2c-8162-4066-b3c0-3751842256c9\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.321722 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk9mm\" (UniqueName: \"kubernetes.io/projected/d644bf2c-8162-4066-b3c0-3751842256c9-kube-api-access-tk9mm\") pod \"d644bf2c-8162-4066-b3c0-3751842256c9\" (UID: \"d644bf2c-8162-4066-b3c0-3751842256c9\") " Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.322261 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d644bf2c-8162-4066-b3c0-3751842256c9" (UID: "d644bf2c-8162-4066-b3c0-3751842256c9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.325887 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d644bf2c-8162-4066-b3c0-3751842256c9" (UID: "d644bf2c-8162-4066-b3c0-3751842256c9"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.330576 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d644bf2c-8162-4066-b3c0-3751842256c9-kube-api-access-tk9mm" (OuterVolumeSpecName: "kube-api-access-tk9mm") pod "d644bf2c-8162-4066-b3c0-3751842256c9" (UID: "d644bf2c-8162-4066-b3c0-3751842256c9"). InnerVolumeSpecName "kube-api-access-tk9mm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.348612 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-scripts" (OuterVolumeSpecName: "scripts") pod "d644bf2c-8162-4066-b3c0-3751842256c9" (UID: "d644bf2c-8162-4066-b3c0-3751842256c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.424150 4685 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.424181 4685 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d644bf2c-8162-4066-b3c0-3751842256c9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.424191 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.424200 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk9mm\" (UniqueName: \"kubernetes.io/projected/d644bf2c-8162-4066-b3c0-3751842256c9-kube-api-access-tk9mm\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.429673 4685 generic.go:334] "Generic (PLEG): container finished" podID="d644bf2c-8162-4066-b3c0-3751842256c9" containerID="622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d" exitCode=0 Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.429831 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.430538 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d644bf2c-8162-4066-b3c0-3751842256c9","Type":"ContainerDied","Data":"622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d"} Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.430598 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d644bf2c-8162-4066-b3c0-3751842256c9","Type":"ContainerDied","Data":"466cdedf8237f4fdc31b7c65f4443ca7566eb8bafa585ff00337163bb29d57d2"} Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.430618 4685 scope.go:117] "RemoveContainer" containerID="622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.454457 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d644bf2c-8162-4066-b3c0-3751842256c9" (UID: "d644bf2c-8162-4066-b3c0-3751842256c9"). 
InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.468199 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a014994-2f2b-4386-8d2f-00e3657867a7","Type":"ContainerStarted","Data":"0e9297e3c6f0d91f2153e806b42c0531879d502fec17e81473d32ecd7d308dfa"} Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.528265 4685 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.535382 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d644bf2c-8162-4066-b3c0-3751842256c9" (UID: "d644bf2c-8162-4066-b3c0-3751842256c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.540542 4685 scope.go:117] "RemoveContainer" containerID="a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.618281 4685 scope.go:117] "RemoveContainer" containerID="b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.630903 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.653752 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-65bd55bccb-s55r4" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.689950 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-config-data" (OuterVolumeSpecName: "config-data") pod "d644bf2c-8162-4066-b3c0-3751842256c9" (UID: "d644bf2c-8162-4066-b3c0-3751842256c9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.732509 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d644bf2c-8162-4066-b3c0-3751842256c9-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.821534 4685 scope.go:117] "RemoveContainer" containerID="622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d" Dec 02 10:21:28 crc kubenswrapper[4685]: E1202 10:21:28.823972 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d\": container with ID starting with 622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d not found: ID does not exist" containerID="622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.824008 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d"} err="failed to get container status \"622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d\": rpc error: code = NotFound desc = could not find container \"622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d\": container with ID starting with 622412b5ae33a579a2825792c3cac63071ffe0376bbbb9fc3e5acfd4d71a249d not found: ID does not exist" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.824033 4685 scope.go:117] "RemoveContainer" containerID="a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a" Dec 02 10:21:28 crc kubenswrapper[4685]: E1202 10:21:28.824575 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a\": container with ID starting with a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a not found: ID does not exist" containerID="a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.824601 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a"} err="failed to get container status \"a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a\": rpc error: code = NotFound desc = could not find container \"a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a\": container with ID starting with a5b6074760364daa26f6a42eed03857c393d7790455c5fecc3706193ba3eb19a not found: ID does not exist" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.824613 4685 scope.go:117] "RemoveContainer" containerID="b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc" Dec 02 10:21:28 crc kubenswrapper[4685]: E1202 10:21:28.837841 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc\": container with ID starting with b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc not found: ID does not exist" containerID="b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc" Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.837888 4685 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc"} err="failed to get container status \"b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc\": rpc error: code = NotFound desc = could not find container \"b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc\": container with ID starting with b38f721018ff5f1c5677b54291b9248549a0172a5eb440a585addc560b4a08dc not found: ID does not exist" Dec 02 10:21:28 crc kubenswrapper[4685]: W1202 10:21:28.837951 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c751a52_1597_4bbf_b9d7_79e8bcf0f9dc.slice/crio-a1a64337d89352a0b90296221c226772b284833d2848be8c698b9706f2163825 WatchSource:0}: Error finding container a1a64337d89352a0b90296221c226772b284833d2848be8c698b9706f2163825: Status 404 returned error can't find the container with id a1a64337d89352a0b90296221c226772b284833d2848be8c698b9706f2163825 Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.889333 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-q9z92"] Dec 02 10:21:28 crc kubenswrapper[4685]: I1202 10:21:28.914622 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:28.997446 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.018732 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:21:29 crc kubenswrapper[4685]: E1202 10:21:29.019710 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="ceilometer-notification-agent" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.019736 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="ceilometer-notification-agent" Dec 02 10:21:29 crc kubenswrapper[4685]: E1202 10:21:29.019764 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="sg-core" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.019772 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="sg-core" Dec 02 10:21:29 crc kubenswrapper[4685]: E1202 10:21:29.019796 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="proxy-httpd" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.019805 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="proxy-httpd" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.020349 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="sg-core" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.020372 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="ceilometer-notification-agent" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.020417 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" containerName="proxy-httpd" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.024318 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.028703 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.028992 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.056460 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.090105 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.164577 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-scripts\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.164640 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.164663 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.164728 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlnrn\" (UniqueName: \"kubernetes.io/projected/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-kube-api-access-wlnrn\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.164754 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-config-data\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.164784 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-run-httpd\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.164800 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-log-httpd\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.270520 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-run-httpd\") pod 
\"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.270792 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-log-httpd\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.270848 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-scripts\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.270882 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.270905 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.270971 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlnrn\" (UniqueName: \"kubernetes.io/projected/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-kube-api-access-wlnrn\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.270996 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-config-data\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.274270 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-run-httpd\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.275517 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-log-httpd\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.292007 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-scripts\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.296161 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") 
" pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.301276 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.302145 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-config-data\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.326414 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlnrn\" (UniqueName: \"kubernetes.io/projected/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-kube-api-access-wlnrn\") pod \"ceilometer-0\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.428238 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.566187 4685 generic.go:334] "Generic (PLEG): container finished" podID="129679f9-1a88-4185-aafd-512749132f28" containerID="ca4cbbbc5e5146fcac5adb5f35ce091207ae56bd84e1efe4b8597de362c39d86" exitCode=0 Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.566253 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7f7b7954-l6dql" event={"ID":"129679f9-1a88-4185-aafd-512749132f28","Type":"ContainerDied","Data":"ca4cbbbc5e5146fcac5adb5f35ce091207ae56bd84e1efe4b8597de362c39d86"} Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.593800 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c2bfabf-1b91-414b-883d-6a748753d3a5","Type":"ContainerStarted","Data":"7d12d455e5ed20cee90e02aaca94e507393c39ac41364a40d872df602ccde07c"} Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.601612 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" event={"ID":"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc","Type":"ContainerStarted","Data":"a1a64337d89352a0b90296221c226772b284833d2848be8c698b9706f2163825"} Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.877952 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:21:29 crc kubenswrapper[4685]: I1202 10:21:29.914000 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d644bf2c-8162-4066-b3c0-3751842256c9" path="/var/lib/kubelet/pods/d644bf2c-8162-4066-b3c0-3751842256c9/volumes" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.001130 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqs68\" (UniqueName: \"kubernetes.io/projected/129679f9-1a88-4185-aafd-512749132f28-kube-api-access-gqs68\") pod \"129679f9-1a88-4185-aafd-512749132f28\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.001213 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-combined-ca-bundle\") pod \"129679f9-1a88-4185-aafd-512749132f28\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.001270 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-httpd-config\") pod \"129679f9-1a88-4185-aafd-512749132f28\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.001296 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-config\") pod \"129679f9-1a88-4185-aafd-512749132f28\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.001411 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-ovndb-tls-certs\") pod \"129679f9-1a88-4185-aafd-512749132f28\" (UID: \"129679f9-1a88-4185-aafd-512749132f28\") " Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.018478 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "129679f9-1a88-4185-aafd-512749132f28" (UID: "129679f9-1a88-4185-aafd-512749132f28"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.020749 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/129679f9-1a88-4185-aafd-512749132f28-kube-api-access-gqs68" (OuterVolumeSpecName: "kube-api-access-gqs68") pod "129679f9-1a88-4185-aafd-512749132f28" (UID: "129679f9-1a88-4185-aafd-512749132f28"). InnerVolumeSpecName "kube-api-access-gqs68". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.109934 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqs68\" (UniqueName: \"kubernetes.io/projected/129679f9-1a88-4185-aafd-512749132f28-kube-api-access-gqs68\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.109972 4685 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.204104 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-config" (OuterVolumeSpecName: "config") pod "129679f9-1a88-4185-aafd-512749132f28" (UID: "129679f9-1a88-4185-aafd-512749132f28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.214144 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "129679f9-1a88-4185-aafd-512749132f28" (UID: "129679f9-1a88-4185-aafd-512749132f28"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.215302 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.215319 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.251967 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "129679f9-1a88-4185-aafd-512749132f28" (UID: "129679f9-1a88-4185-aafd-512749132f28"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.320619 4685 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/129679f9-1a88-4185-aafd-512749132f28-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.533187 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:21:30 crc kubenswrapper[4685]: W1202 10:21:30.560941 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4d10d7e_ad74_4171_b8e6_35c6977afb8e.slice/crio-99b2f3fd9c01bde5c8d00d66e731bd3e266606110cbeac7b2b1553fe3fe23266 WatchSource:0}: Error finding container 99b2f3fd9c01bde5c8d00d66e731bd3e266606110cbeac7b2b1553fe3fe23266: Status 404 returned error can't find the container with id 99b2f3fd9c01bde5c8d00d66e731bd3e266606110cbeac7b2b1553fe3fe23266 Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.616980 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d7f7b7954-l6dql" event={"ID":"129679f9-1a88-4185-aafd-512749132f28","Type":"ContainerDied","Data":"2000381ca6445eb68a99d2b1ea1830ff31f891963539a79120d5c1b08c16ae36"} Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.617052 4685 scope.go:117] "RemoveContainer" containerID="52fcf384c1da3a272b227f8dd9b367730d65904701f4c6fb9e2172653caccde2" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.617552 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6d7f7b7954-l6dql" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.632812 4685 generic.go:334] "Generic (PLEG): container finished" podID="9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" containerID="194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5" exitCode=0 Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.632964 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" event={"ID":"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc","Type":"ContainerDied","Data":"194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5"} Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.640444 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f4d10d7e-ad74-4171-b8e6-35c6977afb8e","Type":"ContainerStarted","Data":"99b2f3fd9c01bde5c8d00d66e731bd3e266606110cbeac7b2b1553fe3fe23266"} Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.893918 4685 scope.go:117] "RemoveContainer" containerID="ca4cbbbc5e5146fcac5adb5f35ce091207ae56bd84e1efe4b8597de362c39d86" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.895345 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6d7f7b7954-l6dql"] Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.901698 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-65bd55bccb-s55r4" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.909862 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6d7f7b7954-l6dql"] Dec 02 10:21:30 crc kubenswrapper[4685]: I1202 10:21:30.911960 4685 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openstack/barbican-api-56bd4844cd-s5f2m" podUID="746491bf-6d00-4370-a7ba-740687bd6faa" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.158:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 10:21:31 crc kubenswrapper[4685]: I1202 10:21:31.115811 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 02 10:21:31 crc kubenswrapper[4685]: I1202 10:21:31.653225 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c2bfabf-1b91-414b-883d-6a748753d3a5","Type":"ContainerStarted","Data":"a5aa81db72266fb451e85c33fa26a9287020cdae878317d16fbfcb6f21c11728"} Dec 02 10:21:31 crc kubenswrapper[4685]: I1202 10:21:31.912737 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="129679f9-1a88-4185-aafd-512749132f28" path="/var/lib/kubelet/pods/129679f9-1a88-4185-aafd-512749132f28/volumes" Dec 02 10:21:32 crc kubenswrapper[4685]: I1202 10:21:32.679275 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" event={"ID":"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc","Type":"ContainerStarted","Data":"2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6"} Dec 02 10:21:32 crc kubenswrapper[4685]: I1202 10:21:32.680709 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:32 crc kubenswrapper[4685]: I1202 10:21:32.687016 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f4d10d7e-ad74-4171-b8e6-35c6977afb8e","Type":"ContainerStarted","Data":"4c832e9cd3ac468237b5b0811018c0917a051508c2ea476e8b8d484ff2258cc5"} Dec 02 10:21:32 crc kubenswrapper[4685]: I1202 10:21:32.690106 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a014994-2f2b-4386-8d2f-00e3657867a7","Type":"ContainerStarted","Data":"7f9fd115bc1666dbcfa7f516d4a5d97ca87152a7a4da8afe196625e175f79def"} Dec 02 10:21:32 crc kubenswrapper[4685]: I1202 10:21:32.727801 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" podStartSLOduration=5.727774646 podStartE2EDuration="5.727774646s" podCreationTimestamp="2025-12-02 10:21:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:21:32.706032911 +0000 UTC m=+1185.077807075" watchObservedRunningTime="2025-12-02 10:21:32.727774646 +0000 UTC m=+1185.099548801" Dec 02 10:21:33 crc kubenswrapper[4685]: I1202 10:21:33.700369 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a014994-2f2b-4386-8d2f-00e3657867a7","Type":"ContainerStarted","Data":"386fc95f4eabf5a572cbc8b0b7e72363ecb20a778c0b431116dededcafa685ad"} Dec 02 10:21:33 crc kubenswrapper[4685]: I1202 10:21:33.702979 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="9c2bfabf-1b91-414b-883d-6a748753d3a5" containerName="cinder-api-log" containerID="cri-o://a5aa81db72266fb451e85c33fa26a9287020cdae878317d16fbfcb6f21c11728" gracePeriod=30 Dec 02 10:21:33 crc kubenswrapper[4685]: I1202 10:21:33.703422 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"9c2bfabf-1b91-414b-883d-6a748753d3a5","Type":"ContainerStarted","Data":"032c6d989fcac5528c22174262f1ca9889536111508a817bf7a18fb3c88d142e"} Dec 02 10:21:33 crc kubenswrapper[4685]: I1202 10:21:33.703520 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 02 10:21:33 crc kubenswrapper[4685]: I1202 10:21:33.703545 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="9c2bfabf-1b91-414b-883d-6a748753d3a5" containerName="cinder-api" containerID="cri-o://032c6d989fcac5528c22174262f1ca9889536111508a817bf7a18fb3c88d142e" gracePeriod=30 Dec 02 10:21:33 crc kubenswrapper[4685]: I1202 10:21:33.746773 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.986356604 podStartE2EDuration="7.746753337s" podCreationTimestamp="2025-12-02 10:21:26 +0000 UTC" firstStartedPulling="2025-12-02 10:21:27.928329931 +0000 UTC m=+1180.300104085" lastFinishedPulling="2025-12-02 10:21:29.688726664 +0000 UTC m=+1182.060500818" observedRunningTime="2025-12-02 10:21:33.728418535 +0000 UTC m=+1186.100192689" watchObservedRunningTime="2025-12-02 10:21:33.746753337 +0000 UTC m=+1186.118527491" Dec 02 10:21:33 crc kubenswrapper[4685]: I1202 10:21:33.749326 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.749309247 podStartE2EDuration="6.749309247s" podCreationTimestamp="2025-12-02 10:21:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:21:33.745751109 +0000 UTC m=+1186.117525263" watchObservedRunningTime="2025-12-02 10:21:33.749309247 +0000 UTC m=+1186.121083401" Dec 02 10:21:34 crc kubenswrapper[4685]: I1202 10:21:34.720385 4685 generic.go:334] "Generic (PLEG): container finished" podID="9c2bfabf-1b91-414b-883d-6a748753d3a5" containerID="a5aa81db72266fb451e85c33fa26a9287020cdae878317d16fbfcb6f21c11728" exitCode=143 Dec 02 10:21:34 crc kubenswrapper[4685]: I1202 10:21:34.720416 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c2bfabf-1b91-414b-883d-6a748753d3a5","Type":"ContainerDied","Data":"a5aa81db72266fb451e85c33fa26a9287020cdae878317d16fbfcb6f21c11728"} Dec 02 10:21:35 crc kubenswrapper[4685]: I1202 10:21:35.263221 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-56bd4844cd-s5f2m" Dec 02 10:21:35 crc kubenswrapper[4685]: I1202 10:21:35.342386 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-65bd55bccb-s55r4"] Dec 02 10:21:35 crc kubenswrapper[4685]: I1202 10:21:35.342630 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-65bd55bccb-s55r4" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api-log" containerID="cri-o://4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698" gracePeriod=30 Dec 02 10:21:35 crc kubenswrapper[4685]: I1202 10:21:35.342703 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-65bd55bccb-s55r4" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api" containerID="cri-o://d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8" gracePeriod=30 Dec 02 10:21:35 crc kubenswrapper[4685]: I1202 10:21:35.733294 4685 generic.go:334] "Generic (PLEG): 
container finished" podID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerID="4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698" exitCode=143 Dec 02 10:21:35 crc kubenswrapper[4685]: I1202 10:21:35.733391 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65bd55bccb-s55r4" event={"ID":"f64b77d2-5c73-478c-b793-20e5982cdea6","Type":"ContainerDied","Data":"4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698"} Dec 02 10:21:35 crc kubenswrapper[4685]: I1202 10:21:35.735312 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f4d10d7e-ad74-4171-b8e6-35c6977afb8e","Type":"ContainerStarted","Data":"7909d0ef85877877a3bfcfbc3c08286188fc338d2d00bb05f361929f02d42d1a"} Dec 02 10:21:36 crc kubenswrapper[4685]: I1202 10:21:36.183452 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:21:36 crc kubenswrapper[4685]: I1202 10:21:36.231272 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:21:36 crc kubenswrapper[4685]: I1202 10:21:36.745764 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f4d10d7e-ad74-4171-b8e6-35c6977afb8e","Type":"ContainerStarted","Data":"1f19d41de2fca987e5f33605f4804551e737505cc0a8620d24661b23fc0c71a5"} Dec 02 10:21:37 crc kubenswrapper[4685]: I1202 10:21:37.224900 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 02 10:21:37 crc kubenswrapper[4685]: I1202 10:21:37.557287 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 02 10:21:37 crc kubenswrapper[4685]: I1202 10:21:37.769542 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f4d10d7e-ad74-4171-b8e6-35c6977afb8e","Type":"ContainerStarted","Data":"32b5987bb948af220f6dc614884d0d70effff4952f752820e24ace1496814de3"} Dec 02 10:21:37 crc kubenswrapper[4685]: I1202 10:21:37.769607 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 10:21:37 crc kubenswrapper[4685]: I1202 10:21:37.828036 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 10:21:37 crc kubenswrapper[4685]: I1202 10:21:37.838652 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.224025657 podStartE2EDuration="9.838627755s" podCreationTimestamp="2025-12-02 10:21:28 +0000 UTC" firstStartedPulling="2025-12-02 10:21:30.564734172 +0000 UTC m=+1182.936508336" lastFinishedPulling="2025-12-02 10:21:37.17933629 +0000 UTC m=+1189.551110434" observedRunningTime="2025-12-02 10:21:37.83697696 +0000 UTC m=+1190.208751114" watchObservedRunningTime="2025-12-02 10:21:37.838627755 +0000 UTC m=+1190.210401919" Dec 02 10:21:37 crc kubenswrapper[4685]: I1202 10:21:37.883775 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:21:37 crc kubenswrapper[4685]: I1202 10:21:37.958474 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-z7f8q"] Dec 02 10:21:37 crc kubenswrapper[4685]: I1202 10:21:37.962762 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" 
podUID="6e8726be-5d67-4d87-8bf0-fc7a95f575e1" containerName="dnsmasq-dns" containerID="cri-o://82e6f22d52fea00fbb032cf900f31905912c2587db2158a6933c6c34fa3474ad" gracePeriod=10 Dec 02 10:21:38 crc kubenswrapper[4685]: I1202 10:21:38.546244 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:21:38 crc kubenswrapper[4685]: I1202 10:21:38.782795 4685 generic.go:334] "Generic (PLEG): container finished" podID="6e8726be-5d67-4d87-8bf0-fc7a95f575e1" containerID="82e6f22d52fea00fbb032cf900f31905912c2587db2158a6933c6c34fa3474ad" exitCode=0 Dec 02 10:21:38 crc kubenswrapper[4685]: I1202 10:21:38.782874 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" event={"ID":"6e8726be-5d67-4d87-8bf0-fc7a95f575e1","Type":"ContainerDied","Data":"82e6f22d52fea00fbb032cf900f31905912c2587db2158a6933c6c34fa3474ad"} Dec 02 10:21:38 crc kubenswrapper[4685]: I1202 10:21:38.783052 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9a014994-2f2b-4386-8d2f-00e3657867a7" containerName="probe" containerID="cri-o://386fc95f4eabf5a572cbc8b0b7e72363ecb20a778c0b431116dededcafa685ad" gracePeriod=30 Dec 02 10:21:38 crc kubenswrapper[4685]: I1202 10:21:38.783170 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9a014994-2f2b-4386-8d2f-00e3657867a7" containerName="cinder-scheduler" containerID="cri-o://7f9fd115bc1666dbcfa7f516d4a5d97ca87152a7a4da8afe196625e175f79def" gracePeriod=30 Dec 02 10:21:38 crc kubenswrapper[4685]: I1202 10:21:38.821925 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-65bd55bccb-s55r4" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:46468->10.217.0.157:9311: read: connection reset by peer" Dec 02 10:21:38 crc kubenswrapper[4685]: I1202 10:21:38.822236 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-65bd55bccb-s55r4" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:46458->10.217.0.157:9311: read: connection reset by peer" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.203472 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-79bf856964-lh2w8" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.221181 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.311473 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-svc\") pod \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.311978 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-config\") pod \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.312145 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-sb\") pod \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.312229 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvzpv\" (UniqueName: \"kubernetes.io/projected/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-kube-api-access-bvzpv\") pod \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.312356 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-nb\") pod \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.312482 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-swift-storage-0\") pod \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\" (UID: \"6e8726be-5d67-4d87-8bf0-fc7a95f575e1\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.316259 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5cb9cd6bb4-c4gq9"] Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.316464 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5cb9cd6bb4-c4gq9" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon-log" containerID="cri-o://783c44c9fca5d937d5d9b266ba27638bc6b105439e274191f3b13648452d42f3" gracePeriod=30 Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.319677 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-5cb9cd6bb4-c4gq9" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon" containerID="cri-o://e30782b4f5791754d39c2ed2d23d4a30b74d4fd535fd8045c89bbffd9d86c01d" gracePeriod=30 Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.385758 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-kube-api-access-bvzpv" (OuterVolumeSpecName: "kube-api-access-bvzpv") pod "6e8726be-5d67-4d87-8bf0-fc7a95f575e1" (UID: "6e8726be-5d67-4d87-8bf0-fc7a95f575e1"). InnerVolumeSpecName "kube-api-access-bvzpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.419902 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvzpv\" (UniqueName: \"kubernetes.io/projected/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-kube-api-access-bvzpv\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.544890 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.549243 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6e8726be-5d67-4d87-8bf0-fc7a95f575e1" (UID: "6e8726be-5d67-4d87-8bf0-fc7a95f575e1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.561096 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-config" (OuterVolumeSpecName: "config") pod "6e8726be-5d67-4d87-8bf0-fc7a95f575e1" (UID: "6e8726be-5d67-4d87-8bf0-fc7a95f575e1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.587555 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6e8726be-5d67-4d87-8bf0-fc7a95f575e1" (UID: "6e8726be-5d67-4d87-8bf0-fc7a95f575e1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.588333 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6e8726be-5d67-4d87-8bf0-fc7a95f575e1" (UID: "6e8726be-5d67-4d87-8bf0-fc7a95f575e1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.589315 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6e8726be-5d67-4d87-8bf0-fc7a95f575e1" (UID: "6e8726be-5d67-4d87-8bf0-fc7a95f575e1"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.634660 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fngzw\" (UniqueName: \"kubernetes.io/projected/f64b77d2-5c73-478c-b793-20e5982cdea6-kube-api-access-fngzw\") pod \"f64b77d2-5c73-478c-b793-20e5982cdea6\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.634813 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-combined-ca-bundle\") pod \"f64b77d2-5c73-478c-b793-20e5982cdea6\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.634865 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data\") pod \"f64b77d2-5c73-478c-b793-20e5982cdea6\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.634927 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64b77d2-5c73-478c-b793-20e5982cdea6-logs\") pod \"f64b77d2-5c73-478c-b793-20e5982cdea6\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.634951 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data-custom\") pod \"f64b77d2-5c73-478c-b793-20e5982cdea6\" (UID: \"f64b77d2-5c73-478c-b793-20e5982cdea6\") " Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.637966 4685 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.638002 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.638007 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f64b77d2-5c73-478c-b793-20e5982cdea6-logs" (OuterVolumeSpecName: "logs") pod "f64b77d2-5c73-478c-b793-20e5982cdea6" (UID: "f64b77d2-5c73-478c-b793-20e5982cdea6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.638015 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.638052 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.638065 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e8726be-5d67-4d87-8bf0-fc7a95f575e1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.644615 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f64b77d2-5c73-478c-b793-20e5982cdea6" (UID: "f64b77d2-5c73-478c-b793-20e5982cdea6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.654697 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f64b77d2-5c73-478c-b793-20e5982cdea6-kube-api-access-fngzw" (OuterVolumeSpecName: "kube-api-access-fngzw") pod "f64b77d2-5c73-478c-b793-20e5982cdea6" (UID: "f64b77d2-5c73-478c-b793-20e5982cdea6"). InnerVolumeSpecName "kube-api-access-fngzw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.711748 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f64b77d2-5c73-478c-b793-20e5982cdea6" (UID: "f64b77d2-5c73-478c-b793-20e5982cdea6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.739679 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.739716 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64b77d2-5c73-478c-b793-20e5982cdea6-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.739728 4685 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.739739 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fngzw\" (UniqueName: \"kubernetes.io/projected/f64b77d2-5c73-478c-b793-20e5982cdea6-kube-api-access-fngzw\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.743698 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data" (OuterVolumeSpecName: "config-data") pod "f64b77d2-5c73-478c-b793-20e5982cdea6" (UID: "f64b77d2-5c73-478c-b793-20e5982cdea6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.809609 4685 generic.go:334] "Generic (PLEG): container finished" podID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerID="d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8" exitCode=0 Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.809708 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65bd55bccb-s55r4" event={"ID":"f64b77d2-5c73-478c-b793-20e5982cdea6","Type":"ContainerDied","Data":"d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8"} Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.809758 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-65bd55bccb-s55r4" event={"ID":"f64b77d2-5c73-478c-b793-20e5982cdea6","Type":"ContainerDied","Data":"81b820407420bdc27aee5c77e47fc7dea5150b861b58778d95a5844aa7af4733"} Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.809790 4685 scope.go:117] "RemoveContainer" containerID="d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.810005 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-65bd55bccb-s55r4" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.827807 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" event={"ID":"6e8726be-5d67-4d87-8bf0-fc7a95f575e1","Type":"ContainerDied","Data":"0f2fdf937b0045efd4ab32a7122fd09fc59111f69fcf0bb3dde6cd7b12ae52b2"} Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.827940 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-z7f8q" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.842151 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64b77d2-5c73-478c-b793-20e5982cdea6-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.866619 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-65bd55bccb-s55r4"] Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.875904 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-65bd55bccb-s55r4"] Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.888740 4685 scope.go:117] "RemoveContainer" containerID="4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.941721 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" path="/var/lib/kubelet/pods/f64b77d2-5c73-478c-b793-20e5982cdea6/volumes" Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.943704 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-z7f8q"] Dec 02 10:21:39 crc kubenswrapper[4685]: I1202 10:21:39.943835 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-z7f8q"] Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.087045 4685 scope.go:117] "RemoveContainer" containerID="d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8" Dec 02 10:21:40 crc kubenswrapper[4685]: E1202 10:21:40.090726 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8\": container with ID starting with d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8 not found: ID does not exist" containerID="d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8" Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.090781 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8"} err="failed to get container status \"d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8\": rpc error: code = NotFound desc = could not find container \"d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8\": container with ID starting with d30f5f1094e0eb5004acff92bb1701cd759bd68fd6e23df58333420561071db8 not found: ID does not exist" Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.090815 4685 scope.go:117] "RemoveContainer" containerID="4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698" Dec 02 10:21:40 crc kubenswrapper[4685]: E1202 10:21:40.092754 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698\": container with ID starting with 4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698 not found: ID does not exist" containerID="4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698" Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.092805 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698"} err="failed to get 
container status \"4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698\": rpc error: code = NotFound desc = could not find container \"4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698\": container with ID starting with 4f429bf0ebb6ca158f92675fb3bcf8181dda0fdb7a650737413ae8c025543698 not found: ID does not exist" Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.092838 4685 scope.go:117] "RemoveContainer" containerID="82e6f22d52fea00fbb032cf900f31905912c2587db2158a6933c6c34fa3474ad" Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.121823 4685 scope.go:117] "RemoveContainer" containerID="ee4bcd9e421a286c71ecbde735936f2187b305fb2114157c0a7597d3d959907a" Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.864134 4685 generic.go:334] "Generic (PLEG): container finished" podID="9a014994-2f2b-4386-8d2f-00e3657867a7" containerID="386fc95f4eabf5a572cbc8b0b7e72363ecb20a778c0b431116dededcafa685ad" exitCode=0 Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.864168 4685 generic.go:334] "Generic (PLEG): container finished" podID="9a014994-2f2b-4386-8d2f-00e3657867a7" containerID="7f9fd115bc1666dbcfa7f516d4a5d97ca87152a7a4da8afe196625e175f79def" exitCode=0 Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.864210 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a014994-2f2b-4386-8d2f-00e3657867a7","Type":"ContainerDied","Data":"386fc95f4eabf5a572cbc8b0b7e72363ecb20a778c0b431116dededcafa685ad"} Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.864236 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a014994-2f2b-4386-8d2f-00e3657867a7","Type":"ContainerDied","Data":"7f9fd115bc1666dbcfa7f516d4a5d97ca87152a7a4da8afe196625e175f79def"} Dec 02 10:21:40 crc kubenswrapper[4685]: I1202 10:21:40.907978 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6549967485-7j47k" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.338241 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 02 10:21:41 crc kubenswrapper[4685]: E1202 10:21:41.338685 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="129679f9-1a88-4185-aafd-512749132f28" containerName="neutron-httpd" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.338700 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="129679f9-1a88-4185-aafd-512749132f28" containerName="neutron-httpd" Dec 02 10:21:41 crc kubenswrapper[4685]: E1202 10:21:41.338714 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e8726be-5d67-4d87-8bf0-fc7a95f575e1" containerName="init" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.338721 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e8726be-5d67-4d87-8bf0-fc7a95f575e1" containerName="init" Dec 02 10:21:41 crc kubenswrapper[4685]: E1202 10:21:41.338735 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.338741 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api" Dec 02 10:21:41 crc kubenswrapper[4685]: E1202 10:21:41.338758 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="129679f9-1a88-4185-aafd-512749132f28" containerName="neutron-api" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 
10:21:41.338764 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="129679f9-1a88-4185-aafd-512749132f28" containerName="neutron-api" Dec 02 10:21:41 crc kubenswrapper[4685]: E1202 10:21:41.338777 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api-log" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.338783 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api-log" Dec 02 10:21:41 crc kubenswrapper[4685]: E1202 10:21:41.338804 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e8726be-5d67-4d87-8bf0-fc7a95f575e1" containerName="dnsmasq-dns" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.338810 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e8726be-5d67-4d87-8bf0-fc7a95f575e1" containerName="dnsmasq-dns" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.338956 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e8726be-5d67-4d87-8bf0-fc7a95f575e1" containerName="dnsmasq-dns" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.338976 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.339002 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="129679f9-1a88-4185-aafd-512749132f28" containerName="neutron-api" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.339022 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64b77d2-5c73-478c-b793-20e5982cdea6" containerName="barbican-api-log" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.339029 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="129679f9-1a88-4185-aafd-512749132f28" containerName="neutron-httpd" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.339597 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.342134 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-zlt4h" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.342945 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.347634 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.355006 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.460588 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.487765 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-openstack-config-secret\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.487877 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k87f\" (UniqueName: \"kubernetes.io/projected/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-kube-api-access-9k87f\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.487899 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-openstack-config\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.487915 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-combined-ca-bundle\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.589097 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp9dl\" (UniqueName: \"kubernetes.io/projected/9a014994-2f2b-4386-8d2f-00e3657867a7-kube-api-access-hp9dl\") pod \"9a014994-2f2b-4386-8d2f-00e3657867a7\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.589383 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-scripts\") pod \"9a014994-2f2b-4386-8d2f-00e3657867a7\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.589463 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9a014994-2f2b-4386-8d2f-00e3657867a7-etc-machine-id\") pod \"9a014994-2f2b-4386-8d2f-00e3657867a7\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.589620 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data-custom\") pod \"9a014994-2f2b-4386-8d2f-00e3657867a7\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.589648 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data\") pod \"9a014994-2f2b-4386-8d2f-00e3657867a7\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.589677 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-combined-ca-bundle\") pod \"9a014994-2f2b-4386-8d2f-00e3657867a7\" (UID: \"9a014994-2f2b-4386-8d2f-00e3657867a7\") " Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.590052 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-openstack-config-secret\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.590052 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9a014994-2f2b-4386-8d2f-00e3657867a7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9a014994-2f2b-4386-8d2f-00e3657867a7" (UID: "9a014994-2f2b-4386-8d2f-00e3657867a7"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.590389 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k87f\" (UniqueName: \"kubernetes.io/projected/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-kube-api-access-9k87f\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.590440 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-combined-ca-bundle\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.590471 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-openstack-config\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.590723 4685 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9a014994-2f2b-4386-8d2f-00e3657867a7-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.591528 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-openstack-config\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.599339 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-openstack-config-secret\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.607094 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-scripts" (OuterVolumeSpecName: "scripts") pod "9a014994-2f2b-4386-8d2f-00e3657867a7" (UID: "9a014994-2f2b-4386-8d2f-00e3657867a7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.608801 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a014994-2f2b-4386-8d2f-00e3657867a7-kube-api-access-hp9dl" (OuterVolumeSpecName: "kube-api-access-hp9dl") pod "9a014994-2f2b-4386-8d2f-00e3657867a7" (UID: "9a014994-2f2b-4386-8d2f-00e3657867a7"). InnerVolumeSpecName "kube-api-access-hp9dl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.609553 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9a014994-2f2b-4386-8d2f-00e3657867a7" (UID: "9a014994-2f2b-4386-8d2f-00e3657867a7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.612641 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-combined-ca-bundle\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.630221 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k87f\" (UniqueName: \"kubernetes.io/projected/62bb7fcc-d4cc-4b1d-b29a-98d6f3441731-kube-api-access-9k87f\") pod \"openstackclient\" (UID: \"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731\") " pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.693172 4685 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.693201 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp9dl\" (UniqueName: \"kubernetes.io/projected/9a014994-2f2b-4386-8d2f-00e3657867a7-kube-api-access-hp9dl\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.693213 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.700771 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a014994-2f2b-4386-8d2f-00e3657867a7" (UID: "9a014994-2f2b-4386-8d2f-00e3657867a7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.760033 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.785168 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data" (OuterVolumeSpecName: "config-data") pod "9a014994-2f2b-4386-8d2f-00e3657867a7" (UID: "9a014994-2f2b-4386-8d2f-00e3657867a7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.796042 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.796073 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a014994-2f2b-4386-8d2f-00e3657867a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.887381 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9a014994-2f2b-4386-8d2f-00e3657867a7","Type":"ContainerDied","Data":"0e9297e3c6f0d91f2153e806b42c0531879d502fec17e81473d32ecd7d308dfa"} Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.887448 4685 scope.go:117] "RemoveContainer" containerID="386fc95f4eabf5a572cbc8b0b7e72363ecb20a778c0b431116dededcafa685ad" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.887488 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.916569 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e8726be-5d67-4d87-8bf0-fc7a95f575e1" path="/var/lib/kubelet/pods/6e8726be-5d67-4d87-8bf0-fc7a95f575e1/volumes" Dec 02 10:21:41 crc kubenswrapper[4685]: I1202 10:21:41.979855 4685 scope.go:117] "RemoveContainer" containerID="7f9fd115bc1666dbcfa7f516d4a5d97ca87152a7a4da8afe196625e175f79def" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.001867 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.039469 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.055054 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 10:21:42 crc kubenswrapper[4685]: E1202 10:21:42.055520 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a014994-2f2b-4386-8d2f-00e3657867a7" containerName="probe" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.055535 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a014994-2f2b-4386-8d2f-00e3657867a7" containerName="probe" Dec 02 10:21:42 crc kubenswrapper[4685]: E1202 10:21:42.055547 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a014994-2f2b-4386-8d2f-00e3657867a7" containerName="cinder-scheduler" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.055553 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a014994-2f2b-4386-8d2f-00e3657867a7" containerName="cinder-scheduler" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.055760 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a014994-2f2b-4386-8d2f-00e3657867a7" containerName="probe" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.055772 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a014994-2f2b-4386-8d2f-00e3657867a7" containerName="cinder-scheduler" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.065716 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.070892 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.085333 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.214489 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-scripts\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.214538 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.214599 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-config-data\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.214621 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/880ba69c-db0d-4fab-b46d-45c7e8915684-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.214689 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.214763 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8kr2\" (UniqueName: \"kubernetes.io/projected/880ba69c-db0d-4fab-b46d-45c7e8915684-kube-api-access-n8kr2\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.315760 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-config-data\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.315808 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/880ba69c-db0d-4fab-b46d-45c7e8915684-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.315877 4685 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.315961 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8kr2\" (UniqueName: \"kubernetes.io/projected/880ba69c-db0d-4fab-b46d-45c7e8915684-kube-api-access-n8kr2\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.315993 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-scripts\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.316015 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.316993 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/880ba69c-db0d-4fab-b46d-45c7e8915684-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.329419 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.336511 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.337276 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-config-data\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.338028 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/880ba69c-db0d-4fab-b46d-45c7e8915684-scripts\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.364177 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8kr2\" (UniqueName: \"kubernetes.io/projected/880ba69c-db0d-4fab-b46d-45c7e8915684-kube-api-access-n8kr2\") pod \"cinder-scheduler-0\" (UID: \"880ba69c-db0d-4fab-b46d-45c7e8915684\") " pod="openstack/cinder-scheduler-0" 
Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.401880 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.432316 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.432829 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.805901 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5cb9cd6bb4-c4gq9" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:41630->10.217.0.144:8443: read: connection reset by peer" Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.909209 4685 generic.go:334] "Generic (PLEG): container finished" podID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerID="e30782b4f5791754d39c2ed2d23d4a30b74d4fd535fd8045c89bbffd9d86c01d" exitCode=0 Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.909281 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb9cd6bb4-c4gq9" event={"ID":"04ab41a3-3d00-4fbd-8e1f-2995658eba9c","Type":"ContainerDied","Data":"e30782b4f5791754d39c2ed2d23d4a30b74d4fd535fd8045c89bbffd9d86c01d"} Dec 02 10:21:42 crc kubenswrapper[4685]: I1202 10:21:42.918087 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731","Type":"ContainerStarted","Data":"ebdc0e8cde0eeb9df9737e436949ebb9f41cd2abe6f3ff4a878e40f08f3637d0"} Dec 02 10:21:43 crc kubenswrapper[4685]: W1202 10:21:43.067765 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod880ba69c_db0d_4fab_b46d_45c7e8915684.slice/crio-d824f91909041c601c974df7edd5f46b84c4c12bbaf1d7078b3d9a230326776a WatchSource:0}: Error finding container d824f91909041c601c974df7edd5f46b84c4c12bbaf1d7078b3d9a230326776a: Status 404 returned error can't find the container with id d824f91909041c601c974df7edd5f46b84c4c12bbaf1d7078b3d9a230326776a Dec 02 10:21:43 crc kubenswrapper[4685]: I1202 10:21:43.078643 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 02 10:21:43 crc kubenswrapper[4685]: I1202 10:21:43.236289 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:43 crc kubenswrapper[4685]: I1202 10:21:43.700896 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-9f589dd8-wf2cx" Dec 02 10:21:43 crc kubenswrapper[4685]: I1202 10:21:43.945600 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a014994-2f2b-4386-8d2f-00e3657867a7" path="/var/lib/kubelet/pods/9a014994-2f2b-4386-8d2f-00e3657867a7/volumes" Dec 02 10:21:43 crc kubenswrapper[4685]: I1202 10:21:43.959968 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"880ba69c-db0d-4fab-b46d-45c7e8915684","Type":"ContainerStarted","Data":"d824f91909041c601c974df7edd5f46b84c4c12bbaf1d7078b3d9a230326776a"} Dec 02 10:21:44 crc kubenswrapper[4685]: I1202 10:21:44.969722 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"880ba69c-db0d-4fab-b46d-45c7e8915684","Type":"ContainerStarted","Data":"33d85215283983b351324e0ab2e0fb4427b18973af810c8fa0d47b5e90d350ed"} Dec 02 10:21:46 crc kubenswrapper[4685]: I1202 10:21:46.991894 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"880ba69c-db0d-4fab-b46d-45c7e8915684","Type":"ContainerStarted","Data":"5d73da5d949fa9739b7f6431da62d6fbf6e066823c305923178998b0e6210d4a"} Dec 02 10:21:47 crc kubenswrapper[4685]: I1202 10:21:47.009799 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=6.009761399 podStartE2EDuration="6.009761399s" podCreationTimestamp="2025-12-02 10:21:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:21:47.006821488 +0000 UTC m=+1199.378595642" watchObservedRunningTime="2025-12-02 10:21:47.009761399 +0000 UTC m=+1199.381535553" Dec 02 10:21:47 crc kubenswrapper[4685]: I1202 10:21:47.432984 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.436874 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-586f964d77-g52q4"] Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.440005 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.444924 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.445336 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.445489 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.459713 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-586f964d77-g52q4"] Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.566048 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ad077a4-4937-4de8-9da4-ca0f58f3adef-log-httpd\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.566134 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqcwt\" (UniqueName: \"kubernetes.io/projected/4ad077a4-4937-4de8-9da4-ca0f58f3adef-kube-api-access-jqcwt\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.566164 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ad077a4-4937-4de8-9da4-ca0f58f3adef-run-httpd\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.566209 4685 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-public-tls-certs\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.566249 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-config-data\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.566285 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4ad077a4-4937-4de8-9da4-ca0f58f3adef-etc-swift\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.566376 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-combined-ca-bundle\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.566402 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-internal-tls-certs\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.668230 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ad077a4-4937-4de8-9da4-ca0f58f3adef-log-httpd\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.668306 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqcwt\" (UniqueName: \"kubernetes.io/projected/4ad077a4-4937-4de8-9da4-ca0f58f3adef-kube-api-access-jqcwt\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.668328 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ad077a4-4937-4de8-9da4-ca0f58f3adef-run-httpd\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.668363 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-public-tls-certs\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.668395 
4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-config-data\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.668422 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4ad077a4-4937-4de8-9da4-ca0f58f3adef-etc-swift\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.668471 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-combined-ca-bundle\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.668490 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-internal-tls-certs\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.668710 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ad077a4-4937-4de8-9da4-ca0f58f3adef-log-httpd\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.669179 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ad077a4-4937-4de8-9da4-ca0f58f3adef-run-httpd\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.676211 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-combined-ca-bundle\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.680259 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-public-tls-certs\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.685130 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4ad077a4-4937-4de8-9da4-ca0f58f3adef-etc-swift\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.695550 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-config-data\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.699913 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ad077a4-4937-4de8-9da4-ca0f58f3adef-internal-tls-certs\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.703300 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqcwt\" (UniqueName: \"kubernetes.io/projected/4ad077a4-4937-4de8-9da4-ca0f58f3adef-kube-api-access-jqcwt\") pod \"swift-proxy-586f964d77-g52q4\" (UID: \"4ad077a4-4937-4de8-9da4-ca0f58f3adef\") " pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:49 crc kubenswrapper[4685]: I1202 10:21:49.763261 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:21:50 crc kubenswrapper[4685]: I1202 10:21:50.849785 4685 scope.go:117] "RemoveContainer" containerID="fcba25e141ec1a7ace8a02b49595baeda9577f63c20ac5055de5a98edaca23ae" Dec 02 10:21:51 crc kubenswrapper[4685]: I1202 10:21:51.374346 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5cb9cd6bb4-c4gq9" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.408233 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.408509 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="ceilometer-central-agent" containerID="cri-o://4c832e9cd3ac468237b5b0811018c0917a051508c2ea476e8b8d484ff2258cc5" gracePeriod=30 Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.408646 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="proxy-httpd" containerID="cri-o://32b5987bb948af220f6dc614884d0d70effff4952f752820e24ace1496814de3" gracePeriod=30 Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.408680 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="sg-core" containerID="cri-o://1f19d41de2fca987e5f33605f4804551e737505cc0a8620d24661b23fc0c71a5" gracePeriod=30 Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.408708 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="ceilometer-notification-agent" containerID="cri-o://7909d0ef85877877a3bfcfbc3c08286188fc338d2d00bb05f361929f02d42d1a" gracePeriod=30 Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.425741 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.784827 4685 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.829942 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-4q5pt"] Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.831075 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-4q5pt" Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.870240 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-4q5pt"] Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.913330 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-cvh77"] Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.914603 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cvh77" Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.936408 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djmt6\" (UniqueName: \"kubernetes.io/projected/550e98fe-cb75-49d0-9711-ac6241981523-kube-api-access-djmt6\") pod \"nova-api-db-create-4q5pt\" (UID: \"550e98fe-cb75-49d0-9711-ac6241981523\") " pod="openstack/nova-api-db-create-4q5pt" Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.937275 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/550e98fe-cb75-49d0-9711-ac6241981523-operator-scripts\") pod \"nova-api-db-create-4q5pt\" (UID: \"550e98fe-cb75-49d0-9711-ac6241981523\") " pod="openstack/nova-api-db-create-4q5pt" Dec 02 10:21:52 crc kubenswrapper[4685]: I1202 10:21:52.970065 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-cvh77"] Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.034931 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-a3e9-account-create-update-8vfbk"] Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.036165 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-a3e9-account-create-update-8vfbk" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.040703 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/550e98fe-cb75-49d0-9711-ac6241981523-operator-scripts\") pod \"nova-api-db-create-4q5pt\" (UID: \"550e98fe-cb75-49d0-9711-ac6241981523\") " pod="openstack/nova-api-db-create-4q5pt" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.040755 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mppr\" (UniqueName: \"kubernetes.io/projected/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-kube-api-access-7mppr\") pod \"nova-cell0-db-create-cvh77\" (UID: \"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac\") " pod="openstack/nova-cell0-db-create-cvh77" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.040784 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djmt6\" (UniqueName: \"kubernetes.io/projected/550e98fe-cb75-49d0-9711-ac6241981523-kube-api-access-djmt6\") pod \"nova-api-db-create-4q5pt\" (UID: \"550e98fe-cb75-49d0-9711-ac6241981523\") " pod="openstack/nova-api-db-create-4q5pt" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.040817 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-operator-scripts\") pod \"nova-cell0-db-create-cvh77\" (UID: \"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac\") " pod="openstack/nova-cell0-db-create-cvh77" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.041507 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/550e98fe-cb75-49d0-9711-ac6241981523-operator-scripts\") pod \"nova-api-db-create-4q5pt\" (UID: \"550e98fe-cb75-49d0-9711-ac6241981523\") " pod="openstack/nova-api-db-create-4q5pt" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.045462 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.059378 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-6zx4q"] Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.060586 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-6zx4q" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.072428 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a3e9-account-create-update-8vfbk"] Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.094334 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerID="32b5987bb948af220f6dc614884d0d70effff4952f752820e24ace1496814de3" exitCode=0 Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.095744 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerID="1f19d41de2fca987e5f33605f4804551e737505cc0a8620d24661b23fc0c71a5" exitCode=2 Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.095759 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerID="4c832e9cd3ac468237b5b0811018c0917a051508c2ea476e8b8d484ff2258cc5" exitCode=0 Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.095793 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f4d10d7e-ad74-4171-b8e6-35c6977afb8e","Type":"ContainerDied","Data":"32b5987bb948af220f6dc614884d0d70effff4952f752820e24ace1496814de3"} Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.095817 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f4d10d7e-ad74-4171-b8e6-35c6977afb8e","Type":"ContainerDied","Data":"1f19d41de2fca987e5f33605f4804551e737505cc0a8620d24661b23fc0c71a5"} Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.095830 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f4d10d7e-ad74-4171-b8e6-35c6977afb8e","Type":"ContainerDied","Data":"4c832e9cd3ac468237b5b0811018c0917a051508c2ea476e8b8d484ff2258cc5"} Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.104220 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djmt6\" (UniqueName: \"kubernetes.io/projected/550e98fe-cb75-49d0-9711-ac6241981523-kube-api-access-djmt6\") pod \"nova-api-db-create-4q5pt\" (UID: \"550e98fe-cb75-49d0-9711-ac6241981523\") " pod="openstack/nova-api-db-create-4q5pt" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.121787 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-6zx4q"] Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.143662 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-operator-scripts\") pod \"nova-api-a3e9-account-create-update-8vfbk\" (UID: \"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa\") " pod="openstack/nova-api-a3e9-account-create-update-8vfbk" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.143930 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mppr\" (UniqueName: \"kubernetes.io/projected/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-kube-api-access-7mppr\") pod \"nova-cell0-db-create-cvh77\" (UID: \"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac\") " pod="openstack/nova-cell0-db-create-cvh77" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.143976 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-operator-scripts\") pod 
\"nova-cell0-db-create-cvh77\" (UID: \"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac\") " pod="openstack/nova-cell0-db-create-cvh77" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.144033 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmhft\" (UniqueName: \"kubernetes.io/projected/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-kube-api-access-wmhft\") pod \"nova-api-a3e9-account-create-update-8vfbk\" (UID: \"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa\") " pod="openstack/nova-api-a3e9-account-create-update-8vfbk" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.144050 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj66k\" (UniqueName: \"kubernetes.io/projected/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-kube-api-access-mj66k\") pod \"nova-cell1-db-create-6zx4q\" (UID: \"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f\") " pod="openstack/nova-cell1-db-create-6zx4q" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.144077 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-operator-scripts\") pod \"nova-cell1-db-create-6zx4q\" (UID: \"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f\") " pod="openstack/nova-cell1-db-create-6zx4q" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.144930 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-operator-scripts\") pod \"nova-cell0-db-create-cvh77\" (UID: \"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac\") " pod="openstack/nova-cell0-db-create-cvh77" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.153096 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-4q5pt" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.169448 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mppr\" (UniqueName: \"kubernetes.io/projected/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-kube-api-access-7mppr\") pod \"nova-cell0-db-create-cvh77\" (UID: \"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac\") " pod="openstack/nova-cell0-db-create-cvh77" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.231436 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-45ee-account-create-update-xc7h6"] Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.232757 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.238158 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-cvh77" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.254259 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.264242 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-45ee-account-create-update-xc7h6"] Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.265373 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-operator-scripts\") pod \"nova-cell0-45ee-account-create-update-xc7h6\" (UID: \"ad0963bd-ed5f-4a2e-b723-0824ad646bdf\") " pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.265718 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj66k\" (UniqueName: \"kubernetes.io/projected/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-kube-api-access-mj66k\") pod \"nova-cell1-db-create-6zx4q\" (UID: \"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f\") " pod="openstack/nova-cell1-db-create-6zx4q" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.265770 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmhft\" (UniqueName: \"kubernetes.io/projected/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-kube-api-access-wmhft\") pod \"nova-api-a3e9-account-create-update-8vfbk\" (UID: \"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa\") " pod="openstack/nova-api-a3e9-account-create-update-8vfbk" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.265827 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-operator-scripts\") pod \"nova-cell1-db-create-6zx4q\" (UID: \"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f\") " pod="openstack/nova-cell1-db-create-6zx4q" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.265880 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wngwf\" (UniqueName: \"kubernetes.io/projected/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-kube-api-access-wngwf\") pod \"nova-cell0-45ee-account-create-update-xc7h6\" (UID: \"ad0963bd-ed5f-4a2e-b723-0824ad646bdf\") " pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.266078 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-operator-scripts\") pod \"nova-api-a3e9-account-create-update-8vfbk\" (UID: \"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa\") " pod="openstack/nova-api-a3e9-account-create-update-8vfbk" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.268349 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-operator-scripts\") pod \"nova-api-a3e9-account-create-update-8vfbk\" (UID: \"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa\") " pod="openstack/nova-api-a3e9-account-create-update-8vfbk" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.269266 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-operator-scripts\") pod \"nova-cell1-db-create-6zx4q\" (UID: \"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f\") " pod="openstack/nova-cell1-db-create-6zx4q" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.308046 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj66k\" (UniqueName: \"kubernetes.io/projected/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-kube-api-access-mj66k\") pod \"nova-cell1-db-create-6zx4q\" (UID: \"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f\") " pod="openstack/nova-cell1-db-create-6zx4q" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.317190 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmhft\" (UniqueName: \"kubernetes.io/projected/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-kube-api-access-wmhft\") pod \"nova-api-a3e9-account-create-update-8vfbk\" (UID: \"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa\") " pod="openstack/nova-api-a3e9-account-create-update-8vfbk" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.366339 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a3e9-account-create-update-8vfbk" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.370111 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-operator-scripts\") pod \"nova-cell0-45ee-account-create-update-xc7h6\" (UID: \"ad0963bd-ed5f-4a2e-b723-0824ad646bdf\") " pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.370207 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wngwf\" (UniqueName: \"kubernetes.io/projected/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-kube-api-access-wngwf\") pod \"nova-cell0-45ee-account-create-update-xc7h6\" (UID: \"ad0963bd-ed5f-4a2e-b723-0824ad646bdf\") " pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.371127 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-operator-scripts\") pod \"nova-cell0-45ee-account-create-update-xc7h6\" (UID: \"ad0963bd-ed5f-4a2e-b723-0824ad646bdf\") " pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.410630 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wngwf\" (UniqueName: \"kubernetes.io/projected/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-kube-api-access-wngwf\") pod \"nova-cell0-45ee-account-create-update-xc7h6\" (UID: \"ad0963bd-ed5f-4a2e-b723-0824ad646bdf\") " pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.434183 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-787b-account-create-update-bztf9"] Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.436036 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-787b-account-create-update-bztf9" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.447624 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.467234 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-6zx4q" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.471960 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1106914c-d68c-4f72-b83e-58ad61aea61f-operator-scripts\") pod \"nova-cell1-787b-account-create-update-bztf9\" (UID: \"1106914c-d68c-4f72-b83e-58ad61aea61f\") " pod="openstack/nova-cell1-787b-account-create-update-bztf9" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.472020 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckqrc\" (UniqueName: \"kubernetes.io/projected/1106914c-d68c-4f72-b83e-58ad61aea61f-kube-api-access-ckqrc\") pod \"nova-cell1-787b-account-create-update-bztf9\" (UID: \"1106914c-d68c-4f72-b83e-58ad61aea61f\") " pod="openstack/nova-cell1-787b-account-create-update-bztf9" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.474603 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-787b-account-create-update-bztf9"] Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.564365 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.574031 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1106914c-d68c-4f72-b83e-58ad61aea61f-operator-scripts\") pod \"nova-cell1-787b-account-create-update-bztf9\" (UID: \"1106914c-d68c-4f72-b83e-58ad61aea61f\") " pod="openstack/nova-cell1-787b-account-create-update-bztf9" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.574092 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckqrc\" (UniqueName: \"kubernetes.io/projected/1106914c-d68c-4f72-b83e-58ad61aea61f-kube-api-access-ckqrc\") pod \"nova-cell1-787b-account-create-update-bztf9\" (UID: \"1106914c-d68c-4f72-b83e-58ad61aea61f\") " pod="openstack/nova-cell1-787b-account-create-update-bztf9" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.574760 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1106914c-d68c-4f72-b83e-58ad61aea61f-operator-scripts\") pod \"nova-cell1-787b-account-create-update-bztf9\" (UID: \"1106914c-d68c-4f72-b83e-58ad61aea61f\") " pod="openstack/nova-cell1-787b-account-create-update-bztf9" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.596220 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckqrc\" (UniqueName: \"kubernetes.io/projected/1106914c-d68c-4f72-b83e-58ad61aea61f-kube-api-access-ckqrc\") pod \"nova-cell1-787b-account-create-update-bztf9\" (UID: \"1106914c-d68c-4f72-b83e-58ad61aea61f\") " pod="openstack/nova-cell1-787b-account-create-update-bztf9" Dec 02 10:21:53 crc kubenswrapper[4685]: I1202 10:21:53.772513 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-787b-account-create-update-bztf9" Dec 02 10:21:55 crc kubenswrapper[4685]: I1202 10:21:55.117427 4685 generic.go:334] "Generic (PLEG): container finished" podID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerID="7909d0ef85877877a3bfcfbc3c08286188fc338d2d00bb05f361929f02d42d1a" exitCode=0 Dec 02 10:21:55 crc kubenswrapper[4685]: I1202 10:21:55.117797 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f4d10d7e-ad74-4171-b8e6-35c6977afb8e","Type":"ContainerDied","Data":"7909d0ef85877877a3bfcfbc3c08286188fc338d2d00bb05f361929f02d42d1a"} Dec 02 10:21:57 crc kubenswrapper[4685]: I1202 10:21:57.281632 4685 scope.go:117] "RemoveContainer" containerID="4f12fbc7596844b9a7c3f1d27e2c3551aca52a89891969adf960c001786b215f" Dec 02 10:21:57 crc kubenswrapper[4685]: I1202 10:21:57.682221 4685 scope.go:117] "RemoveContainer" containerID="4dc07f837c97e7edaa2f7bc242432ac7cb777a4b7c2578066f86b9b5e15ccc9e" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.542222 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.583706 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-6zx4q"] Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.585959 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-sg-core-conf-yaml\") pod \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.586042 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-run-httpd\") pod \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.586074 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-log-httpd\") pod \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.586158 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlnrn\" (UniqueName: \"kubernetes.io/projected/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-kube-api-access-wlnrn\") pod \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.586200 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-config-data\") pod \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.586269 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-combined-ca-bundle\") pod \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.586313 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-scripts\") pod \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\" (UID: \"f4d10d7e-ad74-4171-b8e6-35c6977afb8e\") " Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.587936 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f4d10d7e-ad74-4171-b8e6-35c6977afb8e" (UID: "f4d10d7e-ad74-4171-b8e6-35c6977afb8e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.588235 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f4d10d7e-ad74-4171-b8e6-35c6977afb8e" (UID: "f4d10d7e-ad74-4171-b8e6-35c6977afb8e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.625539 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-scripts" (OuterVolumeSpecName: "scripts") pod "f4d10d7e-ad74-4171-b8e6-35c6977afb8e" (UID: "f4d10d7e-ad74-4171-b8e6-35c6977afb8e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.625900 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-kube-api-access-wlnrn" (OuterVolumeSpecName: "kube-api-access-wlnrn") pod "f4d10d7e-ad74-4171-b8e6-35c6977afb8e" (UID: "f4d10d7e-ad74-4171-b8e6-35c6977afb8e"). InnerVolumeSpecName "kube-api-access-wlnrn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.679761 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-787b-account-create-update-bztf9"] Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.688378 4685 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.688406 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlnrn\" (UniqueName: \"kubernetes.io/projected/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-kube-api-access-wlnrn\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.688418 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.688425 4685 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.697863 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-4q5pt"] Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.732177 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-cvh77"] Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.737095 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f4d10d7e-ad74-4171-b8e6-35c6977afb8e" (UID: "f4d10d7e-ad74-4171-b8e6-35c6977afb8e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.793587 4685 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.872331 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4d10d7e-ad74-4171-b8e6-35c6977afb8e" (UID: "f4d10d7e-ad74-4171-b8e6-35c6977afb8e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.897076 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.924117 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a3e9-account-create-update-8vfbk"] Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.977012 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-45ee-account-create-update-xc7h6"] Dec 02 10:21:58 crc kubenswrapper[4685]: I1202 10:21:58.998290 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-586f964d77-g52q4"] Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.154851 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-config-data" (OuterVolumeSpecName: "config-data") pod "f4d10d7e-ad74-4171-b8e6-35c6977afb8e" (UID: "f4d10d7e-ad74-4171-b8e6-35c6977afb8e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.198657 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6zx4q" event={"ID":"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f","Type":"ContainerStarted","Data":"198eec57c3154c209cf854a1d228219233b2e3cbed44a2112a499023e929062d"} Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.200785 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4d10d7e-ad74-4171-b8e6-35c6977afb8e-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.203056 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cvh77" event={"ID":"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac","Type":"ContainerStarted","Data":"ba5cee28f1f7e9dcc56808e56b4c7946ad317ee7d28e5b4e521f56e66039ab0f"} Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.208848 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-586f964d77-g52q4" event={"ID":"4ad077a4-4937-4de8-9da4-ca0f58f3adef","Type":"ContainerStarted","Data":"b70c72aa1e0a95e7daf08b6ea600ce346304c5be61edfd274b7362177351db71"} Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.214635 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f4d10d7e-ad74-4171-b8e6-35c6977afb8e","Type":"ContainerDied","Data":"99b2f3fd9c01bde5c8d00d66e731bd3e266606110cbeac7b2b1553fe3fe23266"} Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.214678 4685 scope.go:117] "RemoveContainer" containerID="32b5987bb948af220f6dc614884d0d70effff4952f752820e24ace1496814de3" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.214821 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.226255 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"62bb7fcc-d4cc-4b1d-b29a-98d6f3441731","Type":"ContainerStarted","Data":"d7a6e2f5ad2d42cd76b2d8fdccf7b0637fed00d3fae7ea944b693a2f9c6f176a"} Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.228920 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a3e9-account-create-update-8vfbk" event={"ID":"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa","Type":"ContainerStarted","Data":"0a3cca27833f713231c5c586f65046dac36a501ed9e56b87156e97179330ec3a"} Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.236450 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-787b-account-create-update-bztf9" event={"ID":"1106914c-d68c-4f72-b83e-58ad61aea61f","Type":"ContainerStarted","Data":"bc945d836491cac14c2608b7a933d740ef701b886462d0676655fec9fe2f706d"} Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.242701 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" event={"ID":"ad0963bd-ed5f-4a2e-b723-0824ad646bdf","Type":"ContainerStarted","Data":"83225384c627d90736b93fee41d7ffacd9ce4c4aec7a5f3f46039cd9019f40f5"} Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.250168 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-4q5pt" event={"ID":"550e98fe-cb75-49d0-9711-ac6241981523","Type":"ContainerStarted","Data":"0506c28ac8eb17b07271a9665c93ed3210274e94eee6f058a02d850d56ce485a"} Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.264400 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.228203553 podStartE2EDuration="18.264370831s" podCreationTimestamp="2025-12-02 10:21:41 +0000 UTC" firstStartedPulling="2025-12-02 10:21:42.46671295 +0000 UTC m=+1194.838487104" lastFinishedPulling="2025-12-02 10:21:58.502880228 +0000 UTC m=+1210.874654382" observedRunningTime="2025-12-02 10:21:59.243085598 +0000 UTC m=+1211.614859752" watchObservedRunningTime="2025-12-02 10:21:59.264370831 +0000 UTC m=+1211.636144985" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.324314 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.362619 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.372762 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:21:59 crc kubenswrapper[4685]: E1202 10:21:59.373285 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="ceilometer-central-agent" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.373305 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="ceilometer-central-agent" Dec 02 10:21:59 crc kubenswrapper[4685]: E1202 10:21:59.373322 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="ceilometer-notification-agent" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.373329 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="ceilometer-notification-agent" Dec 
02 10:21:59 crc kubenswrapper[4685]: E1202 10:21:59.373349 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="sg-core" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.373356 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="sg-core" Dec 02 10:21:59 crc kubenswrapper[4685]: E1202 10:21:59.373373 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="proxy-httpd" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.373380 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="proxy-httpd" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.373619 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="ceilometer-central-agent" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.373639 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="sg-core" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.373649 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="ceilometer-notification-agent" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.373658 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" containerName="proxy-httpd" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.380045 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.384099 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.384286 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.389508 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.515045 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-scripts\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.515400 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-run-httpd\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.515428 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-log-httpd\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.515457 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgw47\" (UniqueName: 
\"kubernetes.io/projected/2110513d-db84-444d-8b69-22a03e65c800-kube-api-access-zgw47\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.515489 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-config-data\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.515606 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.515694 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.516043 4685 scope.go:117] "RemoveContainer" containerID="1f19d41de2fca987e5f33605f4804551e737505cc0a8620d24661b23fc0c71a5" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.564956 4685 scope.go:117] "RemoveContainer" containerID="7909d0ef85877877a3bfcfbc3c08286188fc338d2d00bb05f361929f02d42d1a" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.622515 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-config-data\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.622658 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.622736 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.622797 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-scripts\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.622824 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-run-httpd\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.622841 4685 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-log-httpd\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.622865 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgw47\" (UniqueName: \"kubernetes.io/projected/2110513d-db84-444d-8b69-22a03e65c800-kube-api-access-zgw47\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.622985 4685 scope.go:117] "RemoveContainer" containerID="4c832e9cd3ac468237b5b0811018c0917a051508c2ea476e8b8d484ff2258cc5" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.626318 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-run-httpd\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.628213 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-log-httpd\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.639800 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-scripts\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.640485 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-config-data\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.647188 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.653539 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgw47\" (UniqueName: \"kubernetes.io/projected/2110513d-db84-444d-8b69-22a03e65c800-kube-api-access-zgw47\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.663866 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.869756 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:21:59 crc kubenswrapper[4685]: I1202 10:21:59.950747 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4d10d7e-ad74-4171-b8e6-35c6977afb8e" path="/var/lib/kubelet/pods/f4d10d7e-ad74-4171-b8e6-35c6977afb8e/volumes" Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.333161 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-586f964d77-g52q4" event={"ID":"4ad077a4-4937-4de8-9da4-ca0f58f3adef","Type":"ContainerStarted","Data":"89de659a8c9840fd040bd7d5ec3ae8d86eb8ecee76aea4170d4262dda4ecfcb7"} Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.333504 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-586f964d77-g52q4" event={"ID":"4ad077a4-4937-4de8-9da4-ca0f58f3adef","Type":"ContainerStarted","Data":"40d3553471eec9bcfc2ee25d59e6669dab55a8e3a848ffe18d24ee5b55046768"} Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.335077 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.335116 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.339736 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a3e9-account-create-update-8vfbk" event={"ID":"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa","Type":"ContainerStarted","Data":"f6a1da09c0ebfd0bb6c124f02821e7fb092be18e45cc4d285f027d51cbe8b5d3"} Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.349902 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" event={"ID":"ad0963bd-ed5f-4a2e-b723-0824ad646bdf","Type":"ContainerStarted","Data":"64bba70de56ea63066b4554ef45801fc33b8fa1f156b984c4a945749fb4cfef7"} Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.359772 4685 generic.go:334] "Generic (PLEG): container finished" podID="1106914c-d68c-4f72-b83e-58ad61aea61f" containerID="dd3afc3a284773632c10bbc49a3968bef6d20cd77e5ad3ed446da46c46ed1da1" exitCode=0 Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.359841 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-787b-account-create-update-bztf9" event={"ID":"1106914c-d68c-4f72-b83e-58ad61aea61f","Type":"ContainerDied","Data":"dd3afc3a284773632c10bbc49a3968bef6d20cd77e5ad3ed446da46c46ed1da1"} Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.364181 4685 generic.go:334] "Generic (PLEG): container finished" podID="550e98fe-cb75-49d0-9711-ac6241981523" containerID="bae334d8fbd2a40380dc77bcc18aa4add24982d8636558670cf71c574364bd2f" exitCode=0 Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.364428 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-4q5pt" event={"ID":"550e98fe-cb75-49d0-9711-ac6241981523","Type":"ContainerDied","Data":"bae334d8fbd2a40380dc77bcc18aa4add24982d8636558670cf71c574364bd2f"} Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.368404 4685 generic.go:334] "Generic (PLEG): container finished" podID="e0ce999c-4e4b-413e-8e21-08b9c6e40b3f" containerID="0c892ffdda94fb32a7f9334a30c03497dc3478d74e06b4768ecf0c4746d1c221" exitCode=0 Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.368648 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6zx4q" 
event={"ID":"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f","Type":"ContainerDied","Data":"0c892ffdda94fb32a7f9334a30c03497dc3478d74e06b4768ecf0c4746d1c221"} Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.372679 4685 generic.go:334] "Generic (PLEG): container finished" podID="0b6eac8f-13f6-4a05-87fb-1ee6c96186ac" containerID="3081afbcf59f10192be7b1b139db5e2d5593ecdf0d5aae40434641dc1fd6da1e" exitCode=0 Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.373012 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cvh77" event={"ID":"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac","Type":"ContainerDied","Data":"3081afbcf59f10192be7b1b139db5e2d5593ecdf0d5aae40434641dc1fd6da1e"} Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.388325 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-586f964d77-g52q4" podStartSLOduration=11.388277694 podStartE2EDuration="11.388277694s" podCreationTimestamp="2025-12-02 10:21:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:22:00.361156671 +0000 UTC m=+1212.732930835" watchObservedRunningTime="2025-12-02 10:22:00.388277694 +0000 UTC m=+1212.760051848" Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.418548 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" podStartSLOduration=7.418527432 podStartE2EDuration="7.418527432s" podCreationTimestamp="2025-12-02 10:21:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:22:00.407294254 +0000 UTC m=+1212.779068428" watchObservedRunningTime="2025-12-02 10:22:00.418527432 +0000 UTC m=+1212.790301586" Dec 02 10:22:00 crc kubenswrapper[4685]: I1202 10:22:00.589538 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:01 crc kubenswrapper[4685]: I1202 10:22:01.374201 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-5cb9cd6bb4-c4gq9" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.144:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.144:8443: connect: connection refused" Dec 02 10:22:01 crc kubenswrapper[4685]: I1202 10:22:01.374604 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:22:01 crc kubenswrapper[4685]: I1202 10:22:01.385046 4685 generic.go:334] "Generic (PLEG): container finished" podID="02632b3a-ff2c-4d71-9bf9-ed781da5b3fa" containerID="f6a1da09c0ebfd0bb6c124f02821e7fb092be18e45cc4d285f027d51cbe8b5d3" exitCode=0 Dec 02 10:22:01 crc kubenswrapper[4685]: I1202 10:22:01.385120 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a3e9-account-create-update-8vfbk" event={"ID":"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa","Type":"ContainerDied","Data":"f6a1da09c0ebfd0bb6c124f02821e7fb092be18e45cc4d285f027d51cbe8b5d3"} Dec 02 10:22:01 crc kubenswrapper[4685]: I1202 10:22:01.388850 4685 generic.go:334] "Generic (PLEG): container finished" podID="ad0963bd-ed5f-4a2e-b723-0824ad646bdf" containerID="64bba70de56ea63066b4554ef45801fc33b8fa1f156b984c4a945749fb4cfef7" exitCode=0 Dec 02 10:22:01 crc kubenswrapper[4685]: I1202 10:22:01.388976 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" event={"ID":"ad0963bd-ed5f-4a2e-b723-0824ad646bdf","Type":"ContainerDied","Data":"64bba70de56ea63066b4554ef45801fc33b8fa1f156b984c4a945749fb4cfef7"} Dec 02 10:22:01 crc kubenswrapper[4685]: I1202 10:22:01.395137 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2110513d-db84-444d-8b69-22a03e65c800","Type":"ContainerStarted","Data":"af4cfc2f56b56499502cb1d0f6738f27fd05b7500bd948cfe55ffc34be8e9205"} Dec 02 10:22:01 crc kubenswrapper[4685]: I1202 10:22:01.395178 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2110513d-db84-444d-8b69-22a03e65c800","Type":"ContainerStarted","Data":"3fc6b4e6696b184e8c36e572a2b5090b1f89938c0e749e5f161ad411b262e43b"} Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.048537 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-4q5pt" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.171578 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/550e98fe-cb75-49d0-9711-ac6241981523-operator-scripts\") pod \"550e98fe-cb75-49d0-9711-ac6241981523\" (UID: \"550e98fe-cb75-49d0-9711-ac6241981523\") " Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.171728 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djmt6\" (UniqueName: \"kubernetes.io/projected/550e98fe-cb75-49d0-9711-ac6241981523-kube-api-access-djmt6\") pod \"550e98fe-cb75-49d0-9711-ac6241981523\" (UID: \"550e98fe-cb75-49d0-9711-ac6241981523\") " Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.173142 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/550e98fe-cb75-49d0-9711-ac6241981523-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "550e98fe-cb75-49d0-9711-ac6241981523" (UID: "550e98fe-cb75-49d0-9711-ac6241981523"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.174679 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/550e98fe-cb75-49d0-9711-ac6241981523-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.177919 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/550e98fe-cb75-49d0-9711-ac6241981523-kube-api-access-djmt6" (OuterVolumeSpecName: "kube-api-access-djmt6") pod "550e98fe-cb75-49d0-9711-ac6241981523" (UID: "550e98fe-cb75-49d0-9711-ac6241981523"). InnerVolumeSpecName "kube-api-access-djmt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.190873 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-787b-account-create-update-bztf9" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.194846 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-6zx4q" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.195212 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-cvh77" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.211022 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a3e9-account-create-update-8vfbk" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.279537 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-operator-scripts\") pod \"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac\" (UID: \"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac\") " Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.279647 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mppr\" (UniqueName: \"kubernetes.io/projected/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-kube-api-access-7mppr\") pod \"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac\" (UID: \"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac\") " Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.279704 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1106914c-d68c-4f72-b83e-58ad61aea61f-operator-scripts\") pod \"1106914c-d68c-4f72-b83e-58ad61aea61f\" (UID: \"1106914c-d68c-4f72-b83e-58ad61aea61f\") " Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.279743 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmhft\" (UniqueName: \"kubernetes.io/projected/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-kube-api-access-wmhft\") pod \"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa\" (UID: \"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa\") " Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.279769 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mj66k\" (UniqueName: \"kubernetes.io/projected/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-kube-api-access-mj66k\") pod \"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f\" (UID: \"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f\") " Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.279788 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-operator-scripts\") pod \"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f\" (UID: \"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f\") " Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.279832 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckqrc\" (UniqueName: \"kubernetes.io/projected/1106914c-d68c-4f72-b83e-58ad61aea61f-kube-api-access-ckqrc\") pod \"1106914c-d68c-4f72-b83e-58ad61aea61f\" (UID: \"1106914c-d68c-4f72-b83e-58ad61aea61f\") " Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.279878 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-operator-scripts\") pod \"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa\" (UID: \"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa\") " Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.280218 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djmt6\" (UniqueName: \"kubernetes.io/projected/550e98fe-cb75-49d0-9711-ac6241981523-kube-api-access-djmt6\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.281627 4685 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e0ce999c-4e4b-413e-8e21-08b9c6e40b3f" (UID: "e0ce999c-4e4b-413e-8e21-08b9c6e40b3f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.281984 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "02632b3a-ff2c-4d71-9bf9-ed781da5b3fa" (UID: "02632b3a-ff2c-4d71-9bf9-ed781da5b3fa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.285216 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1106914c-d68c-4f72-b83e-58ad61aea61f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1106914c-d68c-4f72-b83e-58ad61aea61f" (UID: "1106914c-d68c-4f72-b83e-58ad61aea61f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.285586 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-kube-api-access-wmhft" (OuterVolumeSpecName: "kube-api-access-wmhft") pod "02632b3a-ff2c-4d71-9bf9-ed781da5b3fa" (UID: "02632b3a-ff2c-4d71-9bf9-ed781da5b3fa"). InnerVolumeSpecName "kube-api-access-wmhft". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.285696 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0b6eac8f-13f6-4a05-87fb-1ee6c96186ac" (UID: "0b6eac8f-13f6-4a05-87fb-1ee6c96186ac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.286358 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-kube-api-access-mj66k" (OuterVolumeSpecName: "kube-api-access-mj66k") pod "e0ce999c-4e4b-413e-8e21-08b9c6e40b3f" (UID: "e0ce999c-4e4b-413e-8e21-08b9c6e40b3f"). InnerVolumeSpecName "kube-api-access-mj66k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.289751 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-kube-api-access-7mppr" (OuterVolumeSpecName: "kube-api-access-7mppr") pod "0b6eac8f-13f6-4a05-87fb-1ee6c96186ac" (UID: "0b6eac8f-13f6-4a05-87fb-1ee6c96186ac"). InnerVolumeSpecName "kube-api-access-7mppr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.291469 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1106914c-d68c-4f72-b83e-58ad61aea61f-kube-api-access-ckqrc" (OuterVolumeSpecName: "kube-api-access-ckqrc") pod "1106914c-d68c-4f72-b83e-58ad61aea61f" (UID: "1106914c-d68c-4f72-b83e-58ad61aea61f"). InnerVolumeSpecName "kube-api-access-ckqrc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.381623 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1106914c-d68c-4f72-b83e-58ad61aea61f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.381668 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmhft\" (UniqueName: \"kubernetes.io/projected/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-kube-api-access-wmhft\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.381692 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mj66k\" (UniqueName: \"kubernetes.io/projected/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-kube-api-access-mj66k\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.381701 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.381709 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckqrc\" (UniqueName: \"kubernetes.io/projected/1106914c-d68c-4f72-b83e-58ad61aea61f-kube-api-access-ckqrc\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.381717 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.381725 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.381733 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mppr\" (UniqueName: \"kubernetes.io/projected/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac-kube-api-access-7mppr\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.405905 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-4q5pt" event={"ID":"550e98fe-cb75-49d0-9711-ac6241981523","Type":"ContainerDied","Data":"0506c28ac8eb17b07271a9665c93ed3210274e94eee6f058a02d850d56ce485a"} Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.405952 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0506c28ac8eb17b07271a9665c93ed3210274e94eee6f058a02d850d56ce485a" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.405926 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-4q5pt" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.407834 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6zx4q" event={"ID":"e0ce999c-4e4b-413e-8e21-08b9c6e40b3f","Type":"ContainerDied","Data":"198eec57c3154c209cf854a1d228219233b2e3cbed44a2112a499023e929062d"} Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.407881 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="198eec57c3154c209cf854a1d228219233b2e3cbed44a2112a499023e929062d" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.407925 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-6zx4q" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.437033 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cvh77" event={"ID":"0b6eac8f-13f6-4a05-87fb-1ee6c96186ac","Type":"ContainerDied","Data":"ba5cee28f1f7e9dcc56808e56b4c7946ad317ee7d28e5b4e521f56e66039ab0f"} Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.437080 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba5cee28f1f7e9dcc56808e56b4c7946ad317ee7d28e5b4e521f56e66039ab0f" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.437156 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cvh77" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.439375 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a3e9-account-create-update-8vfbk" event={"ID":"02632b3a-ff2c-4d71-9bf9-ed781da5b3fa","Type":"ContainerDied","Data":"0a3cca27833f713231c5c586f65046dac36a501ed9e56b87156e97179330ec3a"} Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.439410 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a3cca27833f713231c5c586f65046dac36a501ed9e56b87156e97179330ec3a" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.439479 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a3e9-account-create-update-8vfbk" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.442330 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-787b-account-create-update-bztf9" event={"ID":"1106914c-d68c-4f72-b83e-58ad61aea61f","Type":"ContainerDied","Data":"bc945d836491cac14c2608b7a933d740ef701b886462d0676655fec9fe2f706d"} Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.442365 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc945d836491cac14c2608b7a933d740ef701b886462d0676655fec9fe2f706d" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.442707 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-787b-account-create-update-bztf9" Dec 02 10:22:02 crc kubenswrapper[4685]: I1202 10:22:02.453328 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2110513d-db84-444d-8b69-22a03e65c800","Type":"ContainerStarted","Data":"33ecd5300d4b953026ac01ecd50677f80ffc417ee7e523d86ccf57f435d9ddd7"} Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.043847 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.228857 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-operator-scripts\") pod \"ad0963bd-ed5f-4a2e-b723-0824ad646bdf\" (UID: \"ad0963bd-ed5f-4a2e-b723-0824ad646bdf\") " Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.229012 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wngwf\" (UniqueName: \"kubernetes.io/projected/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-kube-api-access-wngwf\") pod \"ad0963bd-ed5f-4a2e-b723-0824ad646bdf\" (UID: \"ad0963bd-ed5f-4a2e-b723-0824ad646bdf\") " Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.229802 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ad0963bd-ed5f-4a2e-b723-0824ad646bdf" (UID: "ad0963bd-ed5f-4a2e-b723-0824ad646bdf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.235362 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-kube-api-access-wngwf" (OuterVolumeSpecName: "kube-api-access-wngwf") pod "ad0963bd-ed5f-4a2e-b723-0824ad646bdf" (UID: "ad0963bd-ed5f-4a2e-b723-0824ad646bdf"). InnerVolumeSpecName "kube-api-access-wngwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.330911 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wngwf\" (UniqueName: \"kubernetes.io/projected/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-kube-api-access-wngwf\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.330943 4685 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad0963bd-ed5f-4a2e-b723-0824ad646bdf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.462094 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.462118 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-45ee-account-create-update-xc7h6" event={"ID":"ad0963bd-ed5f-4a2e-b723-0824ad646bdf","Type":"ContainerDied","Data":"83225384c627d90736b93fee41d7ffacd9ce4c4aec7a5f3f46039cd9019f40f5"} Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.462184 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83225384c627d90736b93fee41d7ffacd9ce4c4aec7a5f3f46039cd9019f40f5" Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.464396 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2110513d-db84-444d-8b69-22a03e65c800","Type":"ContainerStarted","Data":"8d7022e180aa95446ae0ba2900728f15c003af040d01adbbbb305d26deb24478"} Dec 02 10:22:03 crc kubenswrapper[4685]: I1202 10:22:03.551027 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.507233 4685 generic.go:334] "Generic (PLEG): container finished" podID="9c2bfabf-1b91-414b-883d-6a748753d3a5" containerID="032c6d989fcac5528c22174262f1ca9889536111508a817bf7a18fb3c88d142e" exitCode=137 Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.507865 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c2bfabf-1b91-414b-883d-6a748753d3a5","Type":"ContainerDied","Data":"032c6d989fcac5528c22174262f1ca9889536111508a817bf7a18fb3c88d142e"} Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.584747 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.666402 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-combined-ca-bundle\") pod \"9c2bfabf-1b91-414b-883d-6a748753d3a5\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.666479 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c2bfabf-1b91-414b-883d-6a748753d3a5-logs\") pod \"9c2bfabf-1b91-414b-883d-6a748753d3a5\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.666502 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-scripts\") pod \"9c2bfabf-1b91-414b-883d-6a748753d3a5\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.666584 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c676h\" (UniqueName: \"kubernetes.io/projected/9c2bfabf-1b91-414b-883d-6a748753d3a5-kube-api-access-c676h\") pod \"9c2bfabf-1b91-414b-883d-6a748753d3a5\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.666608 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data\") pod \"9c2bfabf-1b91-414b-883d-6a748753d3a5\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " Dec 02 
10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.666681 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data-custom\") pod \"9c2bfabf-1b91-414b-883d-6a748753d3a5\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.666704 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c2bfabf-1b91-414b-883d-6a748753d3a5-etc-machine-id\") pod \"9c2bfabf-1b91-414b-883d-6a748753d3a5\" (UID: \"9c2bfabf-1b91-414b-883d-6a748753d3a5\") " Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.667077 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c2bfabf-1b91-414b-883d-6a748753d3a5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9c2bfabf-1b91-414b-883d-6a748753d3a5" (UID: "9c2bfabf-1b91-414b-883d-6a748753d3a5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.669275 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c2bfabf-1b91-414b-883d-6a748753d3a5-logs" (OuterVolumeSpecName: "logs") pod "9c2bfabf-1b91-414b-883d-6a748753d3a5" (UID: "9c2bfabf-1b91-414b-883d-6a748753d3a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.674382 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9c2bfabf-1b91-414b-883d-6a748753d3a5" (UID: "9c2bfabf-1b91-414b-883d-6a748753d3a5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.674489 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-scripts" (OuterVolumeSpecName: "scripts") pod "9c2bfabf-1b91-414b-883d-6a748753d3a5" (UID: "9c2bfabf-1b91-414b-883d-6a748753d3a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.677974 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c2bfabf-1b91-414b-883d-6a748753d3a5-kube-api-access-c676h" (OuterVolumeSpecName: "kube-api-access-c676h") pod "9c2bfabf-1b91-414b-883d-6a748753d3a5" (UID: "9c2bfabf-1b91-414b-883d-6a748753d3a5"). InnerVolumeSpecName "kube-api-access-c676h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.729444 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c2bfabf-1b91-414b-883d-6a748753d3a5" (UID: "9c2bfabf-1b91-414b-883d-6a748753d3a5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.740197 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data" (OuterVolumeSpecName: "config-data") pod "9c2bfabf-1b91-414b-883d-6a748753d3a5" (UID: "9c2bfabf-1b91-414b-883d-6a748753d3a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.769817 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.769857 4685 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.769871 4685 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c2bfabf-1b91-414b-883d-6a748753d3a5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.769881 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.769893 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c2bfabf-1b91-414b-883d-6a748753d3a5-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.769902 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c2bfabf-1b91-414b-883d-6a748753d3a5-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.769910 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c676h\" (UniqueName: \"kubernetes.io/projected/9c2bfabf-1b91-414b-883d-6a748753d3a5-kube-api-access-c676h\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:04 crc kubenswrapper[4685]: I1202 10:22:04.800743 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.519205 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9c2bfabf-1b91-414b-883d-6a748753d3a5","Type":"ContainerDied","Data":"7d12d455e5ed20cee90e02aaca94e507393c39ac41364a40d872df602ccde07c"} Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.519270 4685 scope.go:117] "RemoveContainer" containerID="032c6d989fcac5528c22174262f1ca9889536111508a817bf7a18fb3c88d142e" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.519221 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.521639 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2110513d-db84-444d-8b69-22a03e65c800","Type":"ContainerStarted","Data":"a70fc088eb0ab55788ef20d78cc324aba7f90fd30fe5869290bbda89ee2fadcc"} Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.521837 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.521815 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="ceilometer-central-agent" containerID="cri-o://af4cfc2f56b56499502cb1d0f6738f27fd05b7500bd948cfe55ffc34be8e9205" gracePeriod=30 Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.521873 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="proxy-httpd" containerID="cri-o://a70fc088eb0ab55788ef20d78cc324aba7f90fd30fe5869290bbda89ee2fadcc" gracePeriod=30 Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.521930 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="sg-core" containerID="cri-o://8d7022e180aa95446ae0ba2900728f15c003af040d01adbbbb305d26deb24478" gracePeriod=30 Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.521973 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="ceilometer-notification-agent" containerID="cri-o://33ecd5300d4b953026ac01ecd50677f80ffc417ee7e523d86ccf57f435d9ddd7" gracePeriod=30 Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.576090 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.867201593 podStartE2EDuration="6.576062029s" podCreationTimestamp="2025-12-02 10:21:59 +0000 UTC" firstStartedPulling="2025-12-02 10:22:00.599555336 +0000 UTC m=+1212.971329490" lastFinishedPulling="2025-12-02 10:22:04.308415772 +0000 UTC m=+1216.680189926" observedRunningTime="2025-12-02 10:22:05.567199016 +0000 UTC m=+1217.938973170" watchObservedRunningTime="2025-12-02 10:22:05.576062029 +0000 UTC m=+1217.947836193" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.581276 4685 scope.go:117] "RemoveContainer" containerID="a5aa81db72266fb451e85c33fa26a9287020cdae878317d16fbfcb6f21c11728" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.627087 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.659714 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.682621 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 02 10:22:05 crc kubenswrapper[4685]: E1202 10:22:05.683044 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="550e98fe-cb75-49d0-9711-ac6241981523" containerName="mariadb-database-create" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683066 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="550e98fe-cb75-49d0-9711-ac6241981523" 
containerName="mariadb-database-create" Dec 02 10:22:05 crc kubenswrapper[4685]: E1202 10:22:05.683089 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c2bfabf-1b91-414b-883d-6a748753d3a5" containerName="cinder-api" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683098 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c2bfabf-1b91-414b-883d-6a748753d3a5" containerName="cinder-api" Dec 02 10:22:05 crc kubenswrapper[4685]: E1202 10:22:05.683122 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02632b3a-ff2c-4d71-9bf9-ed781da5b3fa" containerName="mariadb-account-create-update" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683131 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="02632b3a-ff2c-4d71-9bf9-ed781da5b3fa" containerName="mariadb-account-create-update" Dec 02 10:22:05 crc kubenswrapper[4685]: E1202 10:22:05.683146 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c2bfabf-1b91-414b-883d-6a748753d3a5" containerName="cinder-api-log" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683153 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c2bfabf-1b91-414b-883d-6a748753d3a5" containerName="cinder-api-log" Dec 02 10:22:05 crc kubenswrapper[4685]: E1202 10:22:05.683166 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b6eac8f-13f6-4a05-87fb-1ee6c96186ac" containerName="mariadb-database-create" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683175 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b6eac8f-13f6-4a05-87fb-1ee6c96186ac" containerName="mariadb-database-create" Dec 02 10:22:05 crc kubenswrapper[4685]: E1202 10:22:05.683194 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1106914c-d68c-4f72-b83e-58ad61aea61f" containerName="mariadb-account-create-update" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683201 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1106914c-d68c-4f72-b83e-58ad61aea61f" containerName="mariadb-account-create-update" Dec 02 10:22:05 crc kubenswrapper[4685]: E1202 10:22:05.683215 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0ce999c-4e4b-413e-8e21-08b9c6e40b3f" containerName="mariadb-database-create" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683223 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0ce999c-4e4b-413e-8e21-08b9c6e40b3f" containerName="mariadb-database-create" Dec 02 10:22:05 crc kubenswrapper[4685]: E1202 10:22:05.683240 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad0963bd-ed5f-4a2e-b723-0824ad646bdf" containerName="mariadb-account-create-update" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683247 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad0963bd-ed5f-4a2e-b723-0824ad646bdf" containerName="mariadb-account-create-update" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683436 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c2bfabf-1b91-414b-883d-6a748753d3a5" containerName="cinder-api" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683454 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="02632b3a-ff2c-4d71-9bf9-ed781da5b3fa" containerName="mariadb-account-create-update" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683465 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="550e98fe-cb75-49d0-9711-ac6241981523" containerName="mariadb-database-create" Dec 
02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683476 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b6eac8f-13f6-4a05-87fb-1ee6c96186ac" containerName="mariadb-database-create" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683491 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad0963bd-ed5f-4a2e-b723-0824ad646bdf" containerName="mariadb-account-create-update" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683507 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c2bfabf-1b91-414b-883d-6a748753d3a5" containerName="cinder-api-log" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683519 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0ce999c-4e4b-413e-8e21-08b9c6e40b3f" containerName="mariadb-database-create" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.683531 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1106914c-d68c-4f72-b83e-58ad61aea61f" containerName="mariadb-account-create-update" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.684487 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.688726 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.688921 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.689048 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.710352 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.809715 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7nr4\" (UniqueName: \"kubernetes.io/projected/62f488e9-6105-488d-bb01-f612db6e1fae-kube-api-access-l7nr4\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.809772 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-public-tls-certs\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.809813 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-config-data-custom\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.809841 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.809866 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.809917 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-config-data\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.809938 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62f488e9-6105-488d-bb01-f612db6e1fae-logs\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.809983 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62f488e9-6105-488d-bb01-f612db6e1fae-etc-machine-id\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.810007 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-scripts\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.910589 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c2bfabf-1b91-414b-883d-6a748753d3a5" path="/var/lib/kubelet/pods/9c2bfabf-1b91-414b-883d-6a748753d3a5/volumes" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.912494 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-public-tls-certs\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.912541 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-config-data-custom\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.912627 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.912649 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.912727 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-config-data\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.912767 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62f488e9-6105-488d-bb01-f612db6e1fae-logs\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.912803 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62f488e9-6105-488d-bb01-f612db6e1fae-etc-machine-id\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.912824 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-scripts\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.912932 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7nr4\" (UniqueName: \"kubernetes.io/projected/62f488e9-6105-488d-bb01-f612db6e1fae-kube-api-access-l7nr4\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.913697 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62f488e9-6105-488d-bb01-f612db6e1fae-logs\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.913743 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62f488e9-6105-488d-bb01-f612db6e1fae-etc-machine-id\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.919519 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.920385 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-scripts\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.924488 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-config-data\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.927025 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-config-data-custom\") pod 
\"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.929145 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.929492 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/62f488e9-6105-488d-bb01-f612db6e1fae-public-tls-certs\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:05 crc kubenswrapper[4685]: I1202 10:22:05.938914 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7nr4\" (UniqueName: \"kubernetes.io/projected/62f488e9-6105-488d-bb01-f612db6e1fae-kube-api-access-l7nr4\") pod \"cinder-api-0\" (UID: \"62f488e9-6105-488d-bb01-f612db6e1fae\") " pod="openstack/cinder-api-0" Dec 02 10:22:06 crc kubenswrapper[4685]: I1202 10:22:06.004846 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 02 10:22:06 crc kubenswrapper[4685]: I1202 10:22:06.478378 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 02 10:22:06 crc kubenswrapper[4685]: I1202 10:22:06.533204 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"62f488e9-6105-488d-bb01-f612db6e1fae","Type":"ContainerStarted","Data":"139d57a8214401396ff5e91e015af83963b6a18441b43731e6ec3ce442788394"} Dec 02 10:22:06 crc kubenswrapper[4685]: I1202 10:22:06.536187 4685 generic.go:334] "Generic (PLEG): container finished" podID="2110513d-db84-444d-8b69-22a03e65c800" containerID="a70fc088eb0ab55788ef20d78cc324aba7f90fd30fe5869290bbda89ee2fadcc" exitCode=0 Dec 02 10:22:06 crc kubenswrapper[4685]: I1202 10:22:06.536252 4685 generic.go:334] "Generic (PLEG): container finished" podID="2110513d-db84-444d-8b69-22a03e65c800" containerID="8d7022e180aa95446ae0ba2900728f15c003af040d01adbbbb305d26deb24478" exitCode=2 Dec 02 10:22:06 crc kubenswrapper[4685]: I1202 10:22:06.536261 4685 generic.go:334] "Generic (PLEG): container finished" podID="2110513d-db84-444d-8b69-22a03e65c800" containerID="33ecd5300d4b953026ac01ecd50677f80ffc417ee7e523d86ccf57f435d9ddd7" exitCode=0 Dec 02 10:22:06 crc kubenswrapper[4685]: I1202 10:22:06.536235 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2110513d-db84-444d-8b69-22a03e65c800","Type":"ContainerDied","Data":"a70fc088eb0ab55788ef20d78cc324aba7f90fd30fe5869290bbda89ee2fadcc"} Dec 02 10:22:06 crc kubenswrapper[4685]: I1202 10:22:06.536314 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2110513d-db84-444d-8b69-22a03e65c800","Type":"ContainerDied","Data":"8d7022e180aa95446ae0ba2900728f15c003af040d01adbbbb305d26deb24478"} Dec 02 10:22:06 crc kubenswrapper[4685]: I1202 10:22:06.536325 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2110513d-db84-444d-8b69-22a03e65c800","Type":"ContainerDied","Data":"33ecd5300d4b953026ac01ecd50677f80ffc417ee7e523d86ccf57f435d9ddd7"} Dec 02 10:22:07 crc kubenswrapper[4685]: I1202 10:22:07.581769 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-api-0" event={"ID":"62f488e9-6105-488d-bb01-f612db6e1fae","Type":"ContainerStarted","Data":"cf361d163571d606396312a0829fda9dfe78b41daea083872ffe183c03265fbe"} Dec 02 10:22:08 crc kubenswrapper[4685]: I1202 10:22:08.787030 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"62f488e9-6105-488d-bb01-f612db6e1fae","Type":"ContainerStarted","Data":"7c0e8028cdc610722b3271cd7fc8d14c726863440c685b4d5ed3f5b483cb2ace"} Dec 02 10:22:08 crc kubenswrapper[4685]: I1202 10:22:08.788324 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.179381 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.179359955 podStartE2EDuration="4.179359955s" podCreationTimestamp="2025-12-02 10:22:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:22:09.157513427 +0000 UTC m=+1221.529287601" watchObservedRunningTime="2025-12-02 10:22:09.179359955 +0000 UTC m=+1221.551134109" Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.387388 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b6eac8f_13f6_4a05_87fb_1ee6c96186ac.slice/crio-ba5cee28f1f7e9dcc56808e56b4c7946ad317ee7d28e5b4e521f56e66039ab0f": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b6eac8f_13f6_4a05_87fb_1ee6c96186ac.slice/crio-ba5cee28f1f7e9dcc56808e56b4c7946ad317ee7d28e5b4e521f56e66039ab0f: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.387926 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0ce999c_4e4b_413e_8e21_08b9c6e40b3f.slice/crio-conmon-0c892ffdda94fb32a7f9334a30c03497dc3478d74e06b4768ecf0c4746d1c221.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0ce999c_4e4b_413e_8e21_08b9c6e40b3f.slice/crio-conmon-0c892ffdda94fb32a7f9334a30c03497dc3478d74e06b4768ecf0c4746d1c221.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.387943 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0ce999c_4e4b_413e_8e21_08b9c6e40b3f.slice/crio-0c892ffdda94fb32a7f9334a30c03497dc3478d74e06b4768ecf0c4746d1c221.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0ce999c_4e4b_413e_8e21_08b9c6e40b3f.slice/crio-0c892ffdda94fb32a7f9334a30c03497dc3478d74e06b4768ecf0c4746d1c221.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.387962 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02632b3a_ff2c_4d71_9bf9_ed781da5b3fa.slice/crio-0a3cca27833f713231c5c586f65046dac36a501ed9e56b87156e97179330ec3a": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02632b3a_ff2c_4d71_9bf9_ed781da5b3fa.slice/crio-0a3cca27833f713231c5c586f65046dac36a501ed9e56b87156e97179330ec3a: no such file or directory 
Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.387975 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1106914c_d68c_4f72_b83e_58ad61aea61f.slice/crio-conmon-dd3afc3a284773632c10bbc49a3968bef6d20cd77e5ad3ed446da46c46ed1da1.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1106914c_d68c_4f72_b83e_58ad61aea61f.slice/crio-conmon-dd3afc3a284773632c10bbc49a3968bef6d20cd77e5ad3ed446da46c46ed1da1.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.387990 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad0963bd_ed5f_4a2e_b723_0824ad646bdf.slice/crio-83225384c627d90736b93fee41d7ffacd9ce4c4aec7a5f3f46039cd9019f40f5": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad0963bd_ed5f_4a2e_b723_0824ad646bdf.slice/crio-83225384c627d90736b93fee41d7ffacd9ce4c4aec7a5f3f46039cd9019f40f5: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.388105 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1106914c_d68c_4f72_b83e_58ad61aea61f.slice/crio-dd3afc3a284773632c10bbc49a3968bef6d20cd77e5ad3ed446da46c46ed1da1.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1106914c_d68c_4f72_b83e_58ad61aea61f.slice/crio-dd3afc3a284773632c10bbc49a3968bef6d20cd77e5ad3ed446da46c46ed1da1.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.388127 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod550e98fe_cb75_49d0_9711_ac6241981523.slice/crio-conmon-bae334d8fbd2a40380dc77bcc18aa4add24982d8636558670cf71c574364bd2f.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod550e98fe_cb75_49d0_9711_ac6241981523.slice/crio-conmon-bae334d8fbd2a40380dc77bcc18aa4add24982d8636558670cf71c574364bd2f.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.388139 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b6eac8f_13f6_4a05_87fb_1ee6c96186ac.slice/crio-conmon-3081afbcf59f10192be7b1b139db5e2d5593ecdf0d5aae40434641dc1fd6da1e.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b6eac8f_13f6_4a05_87fb_1ee6c96186ac.slice/crio-conmon-3081afbcf59f10192be7b1b139db5e2d5593ecdf0d5aae40434641dc1fd6da1e.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.388153 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b6eac8f_13f6_4a05_87fb_1ee6c96186ac.slice/crio-3081afbcf59f10192be7b1b139db5e2d5593ecdf0d5aae40434641dc1fd6da1e.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch 
/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b6eac8f_13f6_4a05_87fb_1ee6c96186ac.slice/crio-3081afbcf59f10192be7b1b139db5e2d5593ecdf0d5aae40434641dc1fd6da1e.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.388167 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod550e98fe_cb75_49d0_9711_ac6241981523.slice/crio-bae334d8fbd2a40380dc77bcc18aa4add24982d8636558670cf71c574364bd2f.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod550e98fe_cb75_49d0_9711_ac6241981523.slice/crio-bae334d8fbd2a40380dc77bcc18aa4add24982d8636558670cf71c574364bd2f.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.388209 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02632b3a_ff2c_4d71_9bf9_ed781da5b3fa.slice/crio-conmon-f6a1da09c0ebfd0bb6c124f02821e7fb092be18e45cc4d285f027d51cbe8b5d3.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02632b3a_ff2c_4d71_9bf9_ed781da5b3fa.slice/crio-conmon-f6a1da09c0ebfd0bb6c124f02821e7fb092be18e45cc4d285f027d51cbe8b5d3.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.388311 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad0963bd_ed5f_4a2e_b723_0824ad646bdf.slice/crio-conmon-64bba70de56ea63066b4554ef45801fc33b8fa1f156b984c4a945749fb4cfef7.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad0963bd_ed5f_4a2e_b723_0824ad646bdf.slice/crio-conmon-64bba70de56ea63066b4554ef45801fc33b8fa1f156b984c4a945749fb4cfef7.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.388572 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02632b3a_ff2c_4d71_9bf9_ed781da5b3fa.slice/crio-f6a1da09c0ebfd0bb6c124f02821e7fb092be18e45cc4d285f027d51cbe8b5d3.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02632b3a_ff2c_4d71_9bf9_ed781da5b3fa.slice/crio-f6a1da09c0ebfd0bb6c124f02821e7fb092be18e45cc4d285f027d51cbe8b5d3.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: W1202 10:22:09.388593 4685 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad0963bd_ed5f_4a2e_b723_0824ad646bdf.slice/crio-64bba70de56ea63066b4554ef45801fc33b8fa1f156b984c4a945749fb4cfef7.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad0963bd_ed5f_4a2e_b723_0824ad646bdf.slice/crio-64bba70de56ea63066b4554ef45801fc33b8fa1f156b984c4a945749fb4cfef7.scope: no such file or directory Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.515041 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hddhn"] Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.516155 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.537918 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hddhn"] Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.538068 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.541226 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-gclvq" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.541528 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.616398 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-scripts\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.616529 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-config-data\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.616619 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.616771 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb7vj\" (UniqueName: \"kubernetes.io/projected/469cbb32-2f63-4f4b-813d-e07e778e0eaf-kube-api-access-pb7vj\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.720484 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb7vj\" (UniqueName: \"kubernetes.io/projected/469cbb32-2f63-4f4b-813d-e07e778e0eaf-kube-api-access-pb7vj\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.720554 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-scripts\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.720619 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-config-data\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: 
\"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.720652 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.729267 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-scripts\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.729649 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-config-data\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.731483 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.738314 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb7vj\" (UniqueName: \"kubernetes.io/projected/469cbb32-2f63-4f4b-813d-e07e778e0eaf-kube-api-access-pb7vj\") pod \"nova-cell0-conductor-db-sync-hddhn\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.785717 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-586f964d77-g52q4" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.839839 4685 generic.go:334] "Generic (PLEG): container finished" podID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerID="783c44c9fca5d937d5d9b266ba27638bc6b105439e274191f3b13648452d42f3" exitCode=137 Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.839936 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb9cd6bb4-c4gq9" event={"ID":"04ab41a3-3d00-4fbd-8e1f-2995658eba9c","Type":"ContainerDied","Data":"783c44c9fca5d937d5d9b266ba27638bc6b105439e274191f3b13648452d42f3"} Dec 02 10:22:09 crc kubenswrapper[4685]: E1202 10:22:09.855500 4685 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod550e98fe_cb75_49d0_9711_ac6241981523.slice/crio-0506c28ac8eb17b07271a9665c93ed3210274e94eee6f058a02d850d56ce485a\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b6eac8f_13f6_4a05_87fb_1ee6c96186ac.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c2bfabf_1b91_414b_883d_6a748753d3a5.slice/crio-conmon-032c6d989fcac5528c22174262f1ca9889536111508a817bf7a18fb3c88d142e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0ce999c_4e4b_413e_8e21_08b9c6e40b3f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c2bfabf_1b91_414b_883d_6a748753d3a5.slice/crio-032c6d989fcac5528c22174262f1ca9889536111508a817bf7a18fb3c88d142e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c2bfabf_1b91_414b_883d_6a748753d3a5.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04ab41a3_3d00_4fbd_8e1f_2995658eba9c.slice/crio-conmon-783c44c9fca5d937d5d9b266ba27638bc6b105439e274191f3b13648452d42f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c2bfabf_1b91_414b_883d_6a748753d3a5.slice/crio-7d12d455e5ed20cee90e02aaca94e507393c39ac41364a40d872df602ccde07c\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1106914c_d68c_4f72_b83e_58ad61aea61f.slice/crio-bc945d836491cac14c2608b7a933d740ef701b886462d0676655fec9fe2f706d\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad0963bd_ed5f_4a2e_b723_0824ad646bdf.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04ab41a3_3d00_4fbd_8e1f_2995658eba9c.slice/crio-783c44c9fca5d937d5d9b266ba27638bc6b105439e274191f3b13648452d42f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1106914c_d68c_4f72_b83e_58ad61aea61f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02632b3a_ff2c_4d71_9bf9_ed781da5b3fa.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0ce999c_4e4b_413e_8e21_08b9c6e40b3f.slice/crio-198eec57c3154c209cf854a1d228219233b2e3cbed44a2112a499023e929062d\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod550e98fe_cb75_49d0_9711_ac6241981523.slice\": RecentStats: unable to find data in memory cache]" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.859727 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.912815 4685 generic.go:334] "Generic (PLEG): container finished" podID="2110513d-db84-444d-8b69-22a03e65c800" containerID="af4cfc2f56b56499502cb1d0f6738f27fd05b7500bd948cfe55ffc34be8e9205" exitCode=0 Dec 02 10:22:09 crc kubenswrapper[4685]: I1202 10:22:09.938781 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2110513d-db84-444d-8b69-22a03e65c800","Type":"ContainerDied","Data":"af4cfc2f56b56499502cb1d0f6738f27fd05b7500bd948cfe55ffc34be8e9205"} Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.062678 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.086107 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240158 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-logs\") pod \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240472 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-config-data\") pod \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240511 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-logs" (OuterVolumeSpecName: "logs") pod "04ab41a3-3d00-4fbd-8e1f-2995658eba9c" (UID: "04ab41a3-3d00-4fbd-8e1f-2995658eba9c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240536 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-secret-key\") pod \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240579 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-combined-ca-bundle\") pod \"2110513d-db84-444d-8b69-22a03e65c800\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240634 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-scripts\") pod \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240654 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-scripts\") pod \"2110513d-db84-444d-8b69-22a03e65c800\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240670 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-config-data\") pod \"2110513d-db84-444d-8b69-22a03e65c800\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240711 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-combined-ca-bundle\") pod \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240737 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-run-httpd\") pod \"2110513d-db84-444d-8b69-22a03e65c800\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240774 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whbg4\" (UniqueName: \"kubernetes.io/projected/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-kube-api-access-whbg4\") pod \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240812 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-log-httpd\") pod \"2110513d-db84-444d-8b69-22a03e65c800\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240848 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-sg-core-conf-yaml\") pod \"2110513d-db84-444d-8b69-22a03e65c800\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240903 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-tls-certs\") pod \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\" (UID: \"04ab41a3-3d00-4fbd-8e1f-2995658eba9c\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.240938 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgw47\" (UniqueName: \"kubernetes.io/projected/2110513d-db84-444d-8b69-22a03e65c800-kube-api-access-zgw47\") pod \"2110513d-db84-444d-8b69-22a03e65c800\" (UID: \"2110513d-db84-444d-8b69-22a03e65c800\") " Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.241293 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.246510 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2110513d-db84-444d-8b69-22a03e65c800" (UID: "2110513d-db84-444d-8b69-22a03e65c800"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.246782 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2110513d-db84-444d-8b69-22a03e65c800" (UID: "2110513d-db84-444d-8b69-22a03e65c800"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.258956 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "04ab41a3-3d00-4fbd-8e1f-2995658eba9c" (UID: "04ab41a3-3d00-4fbd-8e1f-2995658eba9c"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.259099 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2110513d-db84-444d-8b69-22a03e65c800-kube-api-access-zgw47" (OuterVolumeSpecName: "kube-api-access-zgw47") pod "2110513d-db84-444d-8b69-22a03e65c800" (UID: "2110513d-db84-444d-8b69-22a03e65c800"). InnerVolumeSpecName "kube-api-access-zgw47". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.268110 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-scripts" (OuterVolumeSpecName: "scripts") pod "2110513d-db84-444d-8b69-22a03e65c800" (UID: "2110513d-db84-444d-8b69-22a03e65c800"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.323325 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-config-data" (OuterVolumeSpecName: "config-data") pod "04ab41a3-3d00-4fbd-8e1f-2995658eba9c" (UID: "04ab41a3-3d00-4fbd-8e1f-2995658eba9c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.328693 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-kube-api-access-whbg4" (OuterVolumeSpecName: "kube-api-access-whbg4") pod "04ab41a3-3d00-4fbd-8e1f-2995658eba9c" (UID: "04ab41a3-3d00-4fbd-8e1f-2995658eba9c"). InnerVolumeSpecName "kube-api-access-whbg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.338020 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2110513d-db84-444d-8b69-22a03e65c800" (UID: "2110513d-db84-444d-8b69-22a03e65c800"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.343267 4685 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.343367 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgw47\" (UniqueName: \"kubernetes.io/projected/2110513d-db84-444d-8b69-22a03e65c800-kube-api-access-zgw47\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.343440 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.343497 4685 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.343549 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.343626 4685 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.343698 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whbg4\" (UniqueName: \"kubernetes.io/projected/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-kube-api-access-whbg4\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.343750 4685 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2110513d-db84-444d-8b69-22a03e65c800-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.344081 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-scripts" (OuterVolumeSpecName: "scripts") pod "04ab41a3-3d00-4fbd-8e1f-2995658eba9c" (UID: "04ab41a3-3d00-4fbd-8e1f-2995658eba9c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.371578 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "04ab41a3-3d00-4fbd-8e1f-2995658eba9c" (UID: "04ab41a3-3d00-4fbd-8e1f-2995658eba9c"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.404947 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04ab41a3-3d00-4fbd-8e1f-2995658eba9c" (UID: "04ab41a3-3d00-4fbd-8e1f-2995658eba9c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.407351 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2110513d-db84-444d-8b69-22a03e65c800" (UID: "2110513d-db84-444d-8b69-22a03e65c800"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.448034 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.448078 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.448093 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.448107 4685 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/04ab41a3-3d00-4fbd-8e1f-2995658eba9c-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.455153 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hddhn"] Dec 02 10:22:10 crc kubenswrapper[4685]: W1202 10:22:10.462488 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod469cbb32_2f63_4f4b_813d_e07e778e0eaf.slice/crio-5b31a1c3e41680efc9afe7849f2f9fd94ac7237af905157743c8227cc73d8deb WatchSource:0}: Error finding container 5b31a1c3e41680efc9afe7849f2f9fd94ac7237af905157743c8227cc73d8deb: Status 404 returned error can't find the container with id 5b31a1c3e41680efc9afe7849f2f9fd94ac7237af905157743c8227cc73d8deb Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.488937 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-config-data" (OuterVolumeSpecName: "config-data") pod "2110513d-db84-444d-8b69-22a03e65c800" (UID: "2110513d-db84-444d-8b69-22a03e65c800"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.549871 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2110513d-db84-444d-8b69-22a03e65c800-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.926469 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5cb9cd6bb4-c4gq9" event={"ID":"04ab41a3-3d00-4fbd-8e1f-2995658eba9c","Type":"ContainerDied","Data":"ae8622a1340c48e9a93f900675a3c7c247040c99a622c3406731f00f8f1a40c7"} Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.926491 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5cb9cd6bb4-c4gq9" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.926553 4685 scope.go:117] "RemoveContainer" containerID="e30782b4f5791754d39c2ed2d23d4a30b74d4fd535fd8045c89bbffd9d86c01d" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.930261 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2110513d-db84-444d-8b69-22a03e65c800","Type":"ContainerDied","Data":"3fc6b4e6696b184e8c36e572a2b5090b1f89938c0e749e5f161ad411b262e43b"} Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.930626 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.936373 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hddhn" event={"ID":"469cbb32-2f63-4f4b-813d-e07e778e0eaf","Type":"ContainerStarted","Data":"5b31a1c3e41680efc9afe7849f2f9fd94ac7237af905157743c8227cc73d8deb"} Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.973190 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5cb9cd6bb4-c4gq9"] Dec 02 10:22:10 crc kubenswrapper[4685]: I1202 10:22:10.986368 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5cb9cd6bb4-c4gq9"] Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.000180 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.009222 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.030518 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:11 crc kubenswrapper[4685]: E1202 10:22:11.030901 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="ceilometer-notification-agent" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.030916 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="ceilometer-notification-agent" Dec 02 10:22:11 crc kubenswrapper[4685]: E1202 10:22:11.030931 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="proxy-httpd" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.030939 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="proxy-httpd" Dec 02 10:22:11 crc kubenswrapper[4685]: E1202 10:22:11.030964 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon-log" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.030970 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon-log" Dec 02 10:22:11 crc kubenswrapper[4685]: E1202 10:22:11.030981 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.030988 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon" Dec 02 10:22:11 crc kubenswrapper[4685]: E1202 10:22:11.031003 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2110513d-db84-444d-8b69-22a03e65c800" 
containerName="ceilometer-central-agent" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.031009 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="ceilometer-central-agent" Dec 02 10:22:11 crc kubenswrapper[4685]: E1202 10:22:11.031022 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="sg-core" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.031028 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="sg-core" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.031192 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="proxy-httpd" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.031208 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="ceilometer-central-agent" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.031219 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon-log" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.031230 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="sg-core" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.031238 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" containerName="horizon" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.031265 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="2110513d-db84-444d-8b69-22a03e65c800" containerName="ceilometer-notification-agent" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.032806 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.038914 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.043047 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.049259 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.126273 4685 scope.go:117] "RemoveContainer" containerID="783c44c9fca5d937d5d9b266ba27638bc6b105439e274191f3b13648452d42f3" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.149009 4685 scope.go:117] "RemoveContainer" containerID="a70fc088eb0ab55788ef20d78cc324aba7f90fd30fe5869290bbda89ee2fadcc" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.159699 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-log-httpd\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.159755 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-run-httpd\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.160114 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljc68\" (UniqueName: \"kubernetes.io/projected/887a10dd-943c-46e0-bb8d-4648208f8877-kube-api-access-ljc68\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.160241 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.160276 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-scripts\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.160395 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.160572 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-config-data\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 
10:22:11.165511 4685 scope.go:117] "RemoveContainer" containerID="8d7022e180aa95446ae0ba2900728f15c003af040d01adbbbb305d26deb24478" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.183924 4685 scope.go:117] "RemoveContainer" containerID="33ecd5300d4b953026ac01ecd50677f80ffc417ee7e523d86ccf57f435d9ddd7" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.217117 4685 scope.go:117] "RemoveContainer" containerID="af4cfc2f56b56499502cb1d0f6738f27fd05b7500bd948cfe55ffc34be8e9205" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.262417 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.262660 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-config-data\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.262765 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-log-httpd\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.262871 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-run-httpd\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.263009 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljc68\" (UniqueName: \"kubernetes.io/projected/887a10dd-943c-46e0-bb8d-4648208f8877-kube-api-access-ljc68\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.263120 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.263204 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-scripts\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.264186 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-log-httpd\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.264826 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-run-httpd\") pod 
\"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.268507 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.270290 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-config-data\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.270714 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-scripts\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.272650 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.286132 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljc68\" (UniqueName: \"kubernetes.io/projected/887a10dd-943c-46e0-bb8d-4648208f8877-kube-api-access-ljc68\") pod \"ceilometer-0\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.355478 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.913650 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04ab41a3-3d00-4fbd-8e1f-2995658eba9c" path="/var/lib/kubelet/pods/04ab41a3-3d00-4fbd-8e1f-2995658eba9c/volumes" Dec 02 10:22:11 crc kubenswrapper[4685]: I1202 10:22:11.914855 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2110513d-db84-444d-8b69-22a03e65c800" path="/var/lib/kubelet/pods/2110513d-db84-444d-8b69-22a03e65c800/volumes" Dec 02 10:22:12 crc kubenswrapper[4685]: I1202 10:22:12.126506 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:12 crc kubenswrapper[4685]: I1202 10:22:12.968177 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887a10dd-943c-46e0-bb8d-4648208f8877","Type":"ContainerStarted","Data":"2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9"} Dec 02 10:22:12 crc kubenswrapper[4685]: I1202 10:22:12.968735 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887a10dd-943c-46e0-bb8d-4648208f8877","Type":"ContainerStarted","Data":"6c06ada47f9e71e72c4226a6e9e8278638a20fb35f263633280bf3b35525eb13"} Dec 02 10:22:13 crc kubenswrapper[4685]: I1202 10:22:13.985348 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887a10dd-943c-46e0-bb8d-4648208f8877","Type":"ContainerStarted","Data":"7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4"} Dec 02 10:22:14 crc kubenswrapper[4685]: I1202 10:22:14.995944 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887a10dd-943c-46e0-bb8d-4648208f8877","Type":"ContainerStarted","Data":"9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf"} Dec 02 10:22:18 crc kubenswrapper[4685]: I1202 10:22:18.621134 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 02 10:22:23 crc kubenswrapper[4685]: I1202 10:22:23.942908 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:24 crc kubenswrapper[4685]: I1202 10:22:24.383779 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:22:24 crc kubenswrapper[4685]: I1202 10:22:24.384284 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0fe8705d-216c-4be5-9fd4-41671563b136" containerName="glance-log" containerID="cri-o://8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11" gracePeriod=30 Dec 02 10:22:24 crc kubenswrapper[4685]: I1202 10:22:24.384744 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0fe8705d-216c-4be5-9fd4-41671563b136" containerName="glance-httpd" containerID="cri-o://2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1" gracePeriod=30 Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.136825 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hddhn" event={"ID":"469cbb32-2f63-4f4b-813d-e07e778e0eaf","Type":"ContainerStarted","Data":"c5949b2135f1a4ff717be8568ada6c183dd79f7b288d5904b17b0246ddff2a79"} Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.138480 4685 generic.go:334] "Generic (PLEG): container finished" 
podID="0fe8705d-216c-4be5-9fd4-41671563b136" containerID="8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11" exitCode=143 Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.138550 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0fe8705d-216c-4be5-9fd4-41671563b136","Type":"ContainerDied","Data":"8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11"} Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.141145 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887a10dd-943c-46e0-bb8d-4648208f8877","Type":"ContainerStarted","Data":"530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099"} Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.141334 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="ceilometer-central-agent" containerID="cri-o://2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9" gracePeriod=30 Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.141387 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="proxy-httpd" containerID="cri-o://530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099" gracePeriod=30 Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.141358 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.141409 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="ceilometer-notification-agent" containerID="cri-o://7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4" gracePeriod=30 Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.141590 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="sg-core" containerID="cri-o://9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf" gracePeriod=30 Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.182718 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-hddhn" podStartSLOduration=2.646747691 podStartE2EDuration="16.182701814s" podCreationTimestamp="2025-12-02 10:22:09 +0000 UTC" firstStartedPulling="2025-12-02 10:22:10.468970503 +0000 UTC m=+1222.840744657" lastFinishedPulling="2025-12-02 10:22:24.004924626 +0000 UTC m=+1236.376698780" observedRunningTime="2025-12-02 10:22:25.177160052 +0000 UTC m=+1237.548934206" watchObservedRunningTime="2025-12-02 10:22:25.182701814 +0000 UTC m=+1237.554475968" Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.629218 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.766923522 podStartE2EDuration="14.629200494s" podCreationTimestamp="2025-12-02 10:22:11 +0000 UTC" firstStartedPulling="2025-12-02 10:22:12.142356816 +0000 UTC m=+1224.514130960" lastFinishedPulling="2025-12-02 10:22:24.004633778 +0000 UTC m=+1236.376407932" observedRunningTime="2025-12-02 10:22:25.239710934 +0000 UTC m=+1237.611485088" watchObservedRunningTime="2025-12-02 10:22:25.629200494 +0000 UTC m=+1238.000974648" 
Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.636717 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.636963 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" containerName="glance-log" containerID="cri-o://7e9a07892190aef654529f902cb94df662ac02e02b87c4aaa33410a8534f0a95" gracePeriod=30 Dec 02 10:22:25 crc kubenswrapper[4685]: I1202 10:22:25.637066 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" containerName="glance-httpd" containerID="cri-o://cc9c0b34e6fe1faa8b475b98c4e603f609064439c55db34adb4078339b6d282b" gracePeriod=30 Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.150742 4685 generic.go:334] "Generic (PLEG): container finished" podID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" containerID="7e9a07892190aef654529f902cb94df662ac02e02b87c4aaa33410a8534f0a95" exitCode=143 Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.150818 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345","Type":"ContainerDied","Data":"7e9a07892190aef654529f902cb94df662ac02e02b87c4aaa33410a8534f0a95"} Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.153070 4685 generic.go:334] "Generic (PLEG): container finished" podID="887a10dd-943c-46e0-bb8d-4648208f8877" containerID="530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099" exitCode=0 Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.153095 4685 generic.go:334] "Generic (PLEG): container finished" podID="887a10dd-943c-46e0-bb8d-4648208f8877" containerID="9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf" exitCode=2 Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.153104 4685 generic.go:334] "Generic (PLEG): container finished" podID="887a10dd-943c-46e0-bb8d-4648208f8877" containerID="2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9" exitCode=0 Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.153820 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887a10dd-943c-46e0-bb8d-4648208f8877","Type":"ContainerDied","Data":"530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099"} Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.153862 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887a10dd-943c-46e0-bb8d-4648208f8877","Type":"ContainerDied","Data":"9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf"} Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.153872 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887a10dd-943c-46e0-bb8d-4648208f8877","Type":"ContainerDied","Data":"2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9"} Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.778224 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.825375 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-scripts\") pod \"887a10dd-943c-46e0-bb8d-4648208f8877\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.825467 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljc68\" (UniqueName: \"kubernetes.io/projected/887a10dd-943c-46e0-bb8d-4648208f8877-kube-api-access-ljc68\") pod \"887a10dd-943c-46e0-bb8d-4648208f8877\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.825493 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-config-data\") pod \"887a10dd-943c-46e0-bb8d-4648208f8877\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.825520 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-run-httpd\") pod \"887a10dd-943c-46e0-bb8d-4648208f8877\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.825603 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-log-httpd\") pod \"887a10dd-943c-46e0-bb8d-4648208f8877\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.825699 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-combined-ca-bundle\") pod \"887a10dd-943c-46e0-bb8d-4648208f8877\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.825789 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-sg-core-conf-yaml\") pod \"887a10dd-943c-46e0-bb8d-4648208f8877\" (UID: \"887a10dd-943c-46e0-bb8d-4648208f8877\") " Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.826232 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "887a10dd-943c-46e0-bb8d-4648208f8877" (UID: "887a10dd-943c-46e0-bb8d-4648208f8877"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.826285 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "887a10dd-943c-46e0-bb8d-4648208f8877" (UID: "887a10dd-943c-46e0-bb8d-4648208f8877"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.841790 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/887a10dd-943c-46e0-bb8d-4648208f8877-kube-api-access-ljc68" (OuterVolumeSpecName: "kube-api-access-ljc68") pod "887a10dd-943c-46e0-bb8d-4648208f8877" (UID: "887a10dd-943c-46e0-bb8d-4648208f8877"). InnerVolumeSpecName "kube-api-access-ljc68". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.856695 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-scripts" (OuterVolumeSpecName: "scripts") pod "887a10dd-943c-46e0-bb8d-4648208f8877" (UID: "887a10dd-943c-46e0-bb8d-4648208f8877"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.928521 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.928644 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljc68\" (UniqueName: \"kubernetes.io/projected/887a10dd-943c-46e0-bb8d-4648208f8877-kube-api-access-ljc68\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.928664 4685 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.928672 4685 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/887a10dd-943c-46e0-bb8d-4648208f8877-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.929886 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "887a10dd-943c-46e0-bb8d-4648208f8877" (UID: "887a10dd-943c-46e0-bb8d-4648208f8877"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.954658 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "887a10dd-943c-46e0-bb8d-4648208f8877" (UID: "887a10dd-943c-46e0-bb8d-4648208f8877"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:26 crc kubenswrapper[4685]: I1202 10:22:26.986785 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-config-data" (OuterVolumeSpecName: "config-data") pod "887a10dd-943c-46e0-bb8d-4648208f8877" (UID: "887a10dd-943c-46e0-bb8d-4648208f8877"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.030147 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.030180 4685 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.030189 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/887a10dd-943c-46e0-bb8d-4648208f8877-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.163884 4685 generic.go:334] "Generic (PLEG): container finished" podID="887a10dd-943c-46e0-bb8d-4648208f8877" containerID="7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4" exitCode=0 Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.163925 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887a10dd-943c-46e0-bb8d-4648208f8877","Type":"ContainerDied","Data":"7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4"} Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.163951 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"887a10dd-943c-46e0-bb8d-4648208f8877","Type":"ContainerDied","Data":"6c06ada47f9e71e72c4226a6e9e8278638a20fb35f263633280bf3b35525eb13"} Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.163966 4685 scope.go:117] "RemoveContainer" containerID="530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.164092 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.190299 4685 scope.go:117] "RemoveContainer" containerID="9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.202528 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.210078 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.212650 4685 scope.go:117] "RemoveContainer" containerID="7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.242450 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:27 crc kubenswrapper[4685]: E1202 10:22:27.243151 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="proxy-httpd" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.243175 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="proxy-httpd" Dec 02 10:22:27 crc kubenswrapper[4685]: E1202 10:22:27.243190 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="ceilometer-central-agent" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.243199 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="ceilometer-central-agent" Dec 02 10:22:27 crc kubenswrapper[4685]: E1202 10:22:27.243220 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="sg-core" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.243227 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="sg-core" Dec 02 10:22:27 crc kubenswrapper[4685]: E1202 10:22:27.243248 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="ceilometer-notification-agent" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.243256 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="ceilometer-notification-agent" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.243475 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="ceilometer-central-agent" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.243488 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="ceilometer-notification-agent" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.243499 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="proxy-httpd" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.243511 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" containerName="sg-core" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.243921 4685 scope.go:117] "RemoveContainer" containerID="2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.245109 4685 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.247858 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.248002 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.266898 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.312140 4685 scope.go:117] "RemoveContainer" containerID="530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099" Dec 02 10:22:27 crc kubenswrapper[4685]: E1202 10:22:27.316836 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099\": container with ID starting with 530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099 not found: ID does not exist" containerID="530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.316885 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099"} err="failed to get container status \"530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099\": rpc error: code = NotFound desc = could not find container \"530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099\": container with ID starting with 530d07e2a4398bec6e7b31e3d3bc81ba3f20074607bff2bb07306195c7a3c099 not found: ID does not exist" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.316912 4685 scope.go:117] "RemoveContainer" containerID="9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf" Dec 02 10:22:27 crc kubenswrapper[4685]: E1202 10:22:27.320722 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf\": container with ID starting with 9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf not found: ID does not exist" containerID="9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.320785 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf"} err="failed to get container status \"9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf\": rpc error: code = NotFound desc = could not find container \"9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf\": container with ID starting with 9e55317cedc3a0b03c8fabeb0f71f038f17e28a4a6cb0b98842bcf7e803faaaf not found: ID does not exist" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.320815 4685 scope.go:117] "RemoveContainer" containerID="7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4" Dec 02 10:22:27 crc kubenswrapper[4685]: E1202 10:22:27.324630 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4\": container with ID starting with 
7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4 not found: ID does not exist" containerID="7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.324686 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4"} err="failed to get container status \"7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4\": rpc error: code = NotFound desc = could not find container \"7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4\": container with ID starting with 7bd831b20a3e9ef02870ff44e93b9455b4cc019a9b4ff10b956bba8d2254cbe4 not found: ID does not exist" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.324732 4685 scope.go:117] "RemoveContainer" containerID="2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9" Dec 02 10:22:27 crc kubenswrapper[4685]: E1202 10:22:27.328701 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9\": container with ID starting with 2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9 not found: ID does not exist" containerID="2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.328786 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9"} err="failed to get container status \"2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9\": rpc error: code = NotFound desc = could not find container \"2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9\": container with ID starting with 2fd98d957a732d3549c272eef9e12312d2ee1b99c7f3d0044489ff05158f8be9 not found: ID does not exist" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.337772 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q95z\" (UniqueName: \"kubernetes.io/projected/b44457fc-79e0-46e1-8fc2-383578ece56f-kube-api-access-7q95z\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.337811 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-run-httpd\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.337834 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-config-data\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.337850 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-log-httpd\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 
10:22:27.337915 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.337948 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.337984 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-scripts\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.439347 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q95z\" (UniqueName: \"kubernetes.io/projected/b44457fc-79e0-46e1-8fc2-383578ece56f-kube-api-access-7q95z\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.439655 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-run-httpd\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.440281 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-config-data\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.440220 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-run-httpd\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.440961 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-log-httpd\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.441243 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.441342 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 
10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.441440 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-scripts\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.441643 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-log-httpd\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.444202 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.446801 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-config-data\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.448341 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-scripts\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.452061 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.471210 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q95z\" (UniqueName: \"kubernetes.io/projected/b44457fc-79e0-46e1-8fc2-383578ece56f-kube-api-access-7q95z\") pod \"ceilometer-0\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.565196 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:27 crc kubenswrapper[4685]: I1202 10:22:27.913922 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="887a10dd-943c-46e0-bb8d-4648208f8877" path="/var/lib/kubelet/pods/887a10dd-943c-46e0-bb8d-4648208f8877/volumes" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.048392 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.118549 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:28 crc kubenswrapper[4685]: W1202 10:22:28.126288 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb44457fc_79e0_46e1_8fc2_383578ece56f.slice/crio-d105738a2d1ecc37e214b3931cbef3320033a111d1a64259149a5b4f98ff56c4 WatchSource:0}: Error finding container d105738a2d1ecc37e214b3931cbef3320033a111d1a64259149a5b4f98ff56c4: Status 404 returned error can't find the container with id d105738a2d1ecc37e214b3931cbef3320033a111d1a64259149a5b4f98ff56c4 Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.128801 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.154214 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-combined-ca-bundle\") pod \"0fe8705d-216c-4be5-9fd4-41671563b136\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.154253 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-scripts\") pod \"0fe8705d-216c-4be5-9fd4-41671563b136\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.154346 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"0fe8705d-216c-4be5-9fd4-41671563b136\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.154368 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-config-data\") pod \"0fe8705d-216c-4be5-9fd4-41671563b136\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.154405 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-logs\") pod \"0fe8705d-216c-4be5-9fd4-41671563b136\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.154466 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-httpd-run\") pod \"0fe8705d-216c-4be5-9fd4-41671563b136\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.154481 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-public-tls-certs\") pod \"0fe8705d-216c-4be5-9fd4-41671563b136\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.154502 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rs4c\" (UniqueName: 
\"kubernetes.io/projected/0fe8705d-216c-4be5-9fd4-41671563b136-kube-api-access-4rs4c\") pod \"0fe8705d-216c-4be5-9fd4-41671563b136\" (UID: \"0fe8705d-216c-4be5-9fd4-41671563b136\") " Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.157946 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0fe8705d-216c-4be5-9fd4-41671563b136" (UID: "0fe8705d-216c-4be5-9fd4-41671563b136"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.158204 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-logs" (OuterVolumeSpecName: "logs") pod "0fe8705d-216c-4be5-9fd4-41671563b136" (UID: "0fe8705d-216c-4be5-9fd4-41671563b136"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.163770 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "0fe8705d-216c-4be5-9fd4-41671563b136" (UID: "0fe8705d-216c-4be5-9fd4-41671563b136"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.166735 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fe8705d-216c-4be5-9fd4-41671563b136-kube-api-access-4rs4c" (OuterVolumeSpecName: "kube-api-access-4rs4c") pod "0fe8705d-216c-4be5-9fd4-41671563b136" (UID: "0fe8705d-216c-4be5-9fd4-41671563b136"). InnerVolumeSpecName "kube-api-access-4rs4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.168313 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-scripts" (OuterVolumeSpecName: "scripts") pod "0fe8705d-216c-4be5-9fd4-41671563b136" (UID: "0fe8705d-216c-4be5-9fd4-41671563b136"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.179470 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b44457fc-79e0-46e1-8fc2-383578ece56f","Type":"ContainerStarted","Data":"d105738a2d1ecc37e214b3931cbef3320033a111d1a64259149a5b4f98ff56c4"} Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.206906 4685 generic.go:334] "Generic (PLEG): container finished" podID="0fe8705d-216c-4be5-9fd4-41671563b136" containerID="2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1" exitCode=0 Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.206960 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0fe8705d-216c-4be5-9fd4-41671563b136","Type":"ContainerDied","Data":"2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1"} Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.206992 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0fe8705d-216c-4be5-9fd4-41671563b136","Type":"ContainerDied","Data":"99eed0d2234972e58ae99006c27a77a7c7a436c60a5c88bf00af184c1493f271"} Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.207012 4685 scope.go:117] "RemoveContainer" containerID="2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.207164 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.231090 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0fe8705d-216c-4be5-9fd4-41671563b136" (UID: "0fe8705d-216c-4be5-9fd4-41671563b136"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.237802 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0fe8705d-216c-4be5-9fd4-41671563b136" (UID: "0fe8705d-216c-4be5-9fd4-41671563b136"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.254295 4685 scope.go:117] "RemoveContainer" containerID="8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.257504 4685 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.257529 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.257540 4685 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0fe8705d-216c-4be5-9fd4-41671563b136-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.257552 4685 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.257578 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rs4c\" (UniqueName: \"kubernetes.io/projected/0fe8705d-216c-4be5-9fd4-41671563b136-kube-api-access-4rs4c\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.257589 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.257600 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.263731 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-config-data" (OuterVolumeSpecName: "config-data") pod "0fe8705d-216c-4be5-9fd4-41671563b136" (UID: "0fe8705d-216c-4be5-9fd4-41671563b136"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.294446 4685 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.315980 4685 scope.go:117] "RemoveContainer" containerID="2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1" Dec 02 10:22:28 crc kubenswrapper[4685]: E1202 10:22:28.316420 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1\": container with ID starting with 2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1 not found: ID does not exist" containerID="2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.316456 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1"} err="failed to get container status \"2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1\": rpc error: code = NotFound desc = could not find container \"2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1\": container with ID starting with 2a433a5959d7d0672e4b9aaae20b46e6cf287060de4ecb6c840664aef8442ac1 not found: ID does not exist" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.316479 4685 scope.go:117] "RemoveContainer" containerID="8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11" Dec 02 10:22:28 crc kubenswrapper[4685]: E1202 10:22:28.318064 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11\": container with ID starting with 8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11 not found: ID does not exist" containerID="8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.318090 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11"} err="failed to get container status \"8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11\": rpc error: code = NotFound desc = could not find container \"8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11\": container with ID starting with 8ffc271212eee3ec47bd7e27e102ffda516434485d125249e7a274218498ca11 not found: ID does not exist" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.374856 4685 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.374890 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fe8705d-216c-4be5-9fd4-41671563b136-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.542234 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.552150 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/glance-default-external-api-0"] Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.571956 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:22:28 crc kubenswrapper[4685]: E1202 10:22:28.572402 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fe8705d-216c-4be5-9fd4-41671563b136" containerName="glance-log" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.572428 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fe8705d-216c-4be5-9fd4-41671563b136" containerName="glance-log" Dec 02 10:22:28 crc kubenswrapper[4685]: E1202 10:22:28.572464 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fe8705d-216c-4be5-9fd4-41671563b136" containerName="glance-httpd" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.572475 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fe8705d-216c-4be5-9fd4-41671563b136" containerName="glance-httpd" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.572686 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fe8705d-216c-4be5-9fd4-41671563b136" containerName="glance-httpd" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.572706 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fe8705d-216c-4be5-9fd4-41671563b136" containerName="glance-log" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.573654 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.578283 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.578415 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.588081 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.681099 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ncpj\" (UniqueName: \"kubernetes.io/projected/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-kube-api-access-8ncpj\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.681162 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.681197 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-config-data\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.681251 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.681318 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.681358 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-scripts\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.681386 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-logs\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.681603 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.782902 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.783294 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-scripts\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.783327 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-logs\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.783407 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.783493 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ncpj\" (UniqueName: 
\"kubernetes.io/projected/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-kube-api-access-8ncpj\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.783525 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.783549 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-config-data\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.783624 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.784185 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.784504 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-logs\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.786439 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.791673 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.792314 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.792652 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.803867 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-scripts\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.829338 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ncpj\" (UniqueName: \"kubernetes.io/projected/ed1ab1f1-1e32-439f-91cf-ba12aca0273a-kube-api-access-8ncpj\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.831800 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"ed1ab1f1-1e32-439f-91cf-ba12aca0273a\") " pod="openstack/glance-default-external-api-0" Dec 02 10:22:28 crc kubenswrapper[4685]: I1202 10:22:28.889479 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 02 10:22:29 crc kubenswrapper[4685]: I1202 10:22:29.230516 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b44457fc-79e0-46e1-8fc2-383578ece56f","Type":"ContainerStarted","Data":"c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2"} Dec 02 10:22:29 crc kubenswrapper[4685]: I1202 10:22:29.234203 4685 generic.go:334] "Generic (PLEG): container finished" podID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" containerID="cc9c0b34e6fe1faa8b475b98c4e603f609064439c55db34adb4078339b6d282b" exitCode=0 Dec 02 10:22:29 crc kubenswrapper[4685]: I1202 10:22:29.234245 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345","Type":"ContainerDied","Data":"cc9c0b34e6fe1faa8b475b98c4e603f609064439c55db34adb4078339b6d282b"} Dec 02 10:22:29 crc kubenswrapper[4685]: I1202 10:22:29.476680 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 02 10:22:29 crc kubenswrapper[4685]: I1202 10:22:29.933984 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fe8705d-216c-4be5-9fd4-41671563b136" path="/var/lib/kubelet/pods/0fe8705d-216c-4be5-9fd4-41671563b136/volumes" Dec 02 10:22:29 crc kubenswrapper[4685]: I1202 10:22:29.947814 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.009702 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-scripts\") pod \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.009883 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngt6m\" (UniqueName: \"kubernetes.io/projected/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-kube-api-access-ngt6m\") pod \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.009913 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-httpd-run\") pod \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.009929 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-combined-ca-bundle\") pod \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.009967 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-config-data\") pod \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.010002 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-internal-tls-certs\") pod \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.010062 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.010126 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-logs\") pod \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\" (UID: \"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345\") " Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.021657 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-logs" (OuterVolumeSpecName: "logs") pod "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" (UID: "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.023864 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" (UID: "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.027820 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-scripts" (OuterVolumeSpecName: "scripts") pod "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" (UID: "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.034269 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-kube-api-access-ngt6m" (OuterVolumeSpecName: "kube-api-access-ngt6m") pod "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" (UID: "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345"). InnerVolumeSpecName "kube-api-access-ngt6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.035176 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" (UID: "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.100328 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" (UID: "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.116805 4685 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.116851 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.116860 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.116869 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngt6m\" (UniqueName: \"kubernetes.io/projected/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-kube-api-access-ngt6m\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.116880 4685 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.116889 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.137791 4685 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.138910 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" (UID: "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.156508 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-config-data" (OuterVolumeSpecName: "config-data") pod "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" (UID: "e98cf701-a98d-44bb-9bf2-1e0e0d7d5345"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.218308 4685 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.218332 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.218344 4685 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.255120 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.255378 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98cf701-a98d-44bb-9bf2-1e0e0d7d5345","Type":"ContainerDied","Data":"85bf843c6e7c5c6b457258b1111a9ac6219f2c174a04d655f376dece2a23f152"} Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.255446 4685 scope.go:117] "RemoveContainer" containerID="cc9c0b34e6fe1faa8b475b98c4e603f609064439c55db34adb4078339b6d282b" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.270027 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ed1ab1f1-1e32-439f-91cf-ba12aca0273a","Type":"ContainerStarted","Data":"a153e18899b749cd317ec4d7ada71b6cd5ac8c03ed6229300ba794c015fd0ede"} Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.270079 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ed1ab1f1-1e32-439f-91cf-ba12aca0273a","Type":"ContainerStarted","Data":"f57e2e054a40538f6cd6a0a5b75d98226145194e8e9616373e6b048de9892441"} Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.308395 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.317905 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.320277 4685 scope.go:117] "RemoveContainer" containerID="7e9a07892190aef654529f902cb94df662ac02e02b87c4aaa33410a8534f0a95" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.344630 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:22:30 crc kubenswrapper[4685]: E1202 10:22:30.345063 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" containerName="glance-httpd" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.345084 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" containerName="glance-httpd" Dec 02 10:22:30 crc kubenswrapper[4685]: E1202 10:22:30.345143 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" containerName="glance-log" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.345151 4685 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" containerName="glance-log" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.345303 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" containerName="glance-httpd" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.345328 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" containerName="glance-log" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.346418 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.349391 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.353289 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.361858 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.424160 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.424587 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.424827 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-logs\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.425096 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvstm\" (UniqueName: \"kubernetes.io/projected/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-kube-api-access-hvstm\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.425241 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.425401 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: 
\"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.425547 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.425752 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.527737 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-logs\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.528007 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvstm\" (UniqueName: \"kubernetes.io/projected/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-kube-api-access-hvstm\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.528088 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.528202 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.528304 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.528405 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.528540 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " 
pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.528649 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.528308 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-logs\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.528422 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.530736 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.542453 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.554701 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvstm\" (UniqueName: \"kubernetes.io/projected/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-kube-api-access-hvstm\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.557214 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.559167 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.571664 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37ce4a0b-7871-4784-a19e-36bd7b62ebbe-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 
10:22:30.573310 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"37ce4a0b-7871-4784-a19e-36bd7b62ebbe\") " pod="openstack/glance-default-internal-api-0" Dec 02 10:22:30 crc kubenswrapper[4685]: E1202 10:22:30.582931 4685 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode98cf701_a98d_44bb_9bf2_1e0e0d7d5345.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode98cf701_a98d_44bb_9bf2_1e0e0d7d5345.slice/crio-85bf843c6e7c5c6b457258b1111a9ac6219f2c174a04d655f376dece2a23f152\": RecentStats: unable to find data in memory cache]" Dec 02 10:22:30 crc kubenswrapper[4685]: I1202 10:22:30.745735 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:31 crc kubenswrapper[4685]: I1202 10:22:31.205302 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 02 10:22:31 crc kubenswrapper[4685]: I1202 10:22:31.290124 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ed1ab1f1-1e32-439f-91cf-ba12aca0273a","Type":"ContainerStarted","Data":"4f37df54655ce8ba44dbf452a93941b5bccba2b1a2928f292f0e844a1ea4f74f"} Dec 02 10:22:31 crc kubenswrapper[4685]: I1202 10:22:31.292720 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37ce4a0b-7871-4784-a19e-36bd7b62ebbe","Type":"ContainerStarted","Data":"20f6f368c9d1ab14cc36a7ee01927261e33bc5d0f4b6e86a5c62972a01fb13e2"} Dec 02 10:22:31 crc kubenswrapper[4685]: I1202 10:22:31.315260 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b44457fc-79e0-46e1-8fc2-383578ece56f","Type":"ContainerStarted","Data":"1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93"} Dec 02 10:22:31 crc kubenswrapper[4685]: I1202 10:22:31.328889 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.3288668599999998 podStartE2EDuration="3.32886686s" podCreationTimestamp="2025-12-02 10:22:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:22:31.326096404 +0000 UTC m=+1243.697870558" watchObservedRunningTime="2025-12-02 10:22:31.32886686 +0000 UTC m=+1243.700641024" Dec 02 10:22:31 crc kubenswrapper[4685]: I1202 10:22:31.925339 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e98cf701-a98d-44bb-9bf2-1e0e0d7d5345" path="/var/lib/kubelet/pods/e98cf701-a98d-44bb-9bf2-1e0e0d7d5345/volumes" Dec 02 10:22:32 crc kubenswrapper[4685]: I1202 10:22:32.362072 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37ce4a0b-7871-4784-a19e-36bd7b62ebbe","Type":"ContainerStarted","Data":"efda705628039ee727b84d76a9a54b5f29f4f8046bc3b995b25a7aad8e36f229"} Dec 02 10:22:32 crc kubenswrapper[4685]: I1202 10:22:32.372873 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"b44457fc-79e0-46e1-8fc2-383578ece56f","Type":"ContainerStarted","Data":"4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e"} Dec 02 10:22:33 crc kubenswrapper[4685]: I1202 10:22:33.387439 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37ce4a0b-7871-4784-a19e-36bd7b62ebbe","Type":"ContainerStarted","Data":"321fa88b5bfb9d2cdb6a3a8b7fc9e48ba9c2735c74196a6fd21c921bbcd4e44f"} Dec 02 10:22:33 crc kubenswrapper[4685]: I1202 10:22:33.393828 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b44457fc-79e0-46e1-8fc2-383578ece56f","Type":"ContainerStarted","Data":"44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a"} Dec 02 10:22:33 crc kubenswrapper[4685]: I1202 10:22:33.394163 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 10:22:33 crc kubenswrapper[4685]: I1202 10:22:33.419428 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.419409571 podStartE2EDuration="3.419409571s" podCreationTimestamp="2025-12-02 10:22:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:22:33.411043052 +0000 UTC m=+1245.782817196" watchObservedRunningTime="2025-12-02 10:22:33.419409571 +0000 UTC m=+1245.791183725" Dec 02 10:22:33 crc kubenswrapper[4685]: I1202 10:22:33.450243 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.731345224 podStartE2EDuration="6.450224765s" podCreationTimestamp="2025-12-02 10:22:27 +0000 UTC" firstStartedPulling="2025-12-02 10:22:28.128609276 +0000 UTC m=+1240.500383430" lastFinishedPulling="2025-12-02 10:22:32.847488817 +0000 UTC m=+1245.219262971" observedRunningTime="2025-12-02 10:22:33.449620678 +0000 UTC m=+1245.821394832" watchObservedRunningTime="2025-12-02 10:22:33.450224765 +0000 UTC m=+1245.821998909" Dec 02 10:22:34 crc kubenswrapper[4685]: I1202 10:22:34.018487 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:35 crc kubenswrapper[4685]: I1202 10:22:35.422123 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="ceilometer-central-agent" containerID="cri-o://c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2" gracePeriod=30 Dec 02 10:22:35 crc kubenswrapper[4685]: I1202 10:22:35.422208 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="ceilometer-notification-agent" containerID="cri-o://1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93" gracePeriod=30 Dec 02 10:22:35 crc kubenswrapper[4685]: I1202 10:22:35.422208 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="sg-core" containerID="cri-o://4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e" gracePeriod=30 Dec 02 10:22:35 crc kubenswrapper[4685]: I1202 10:22:35.422240 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="proxy-httpd" 
containerID="cri-o://44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a" gracePeriod=30 Dec 02 10:22:36 crc kubenswrapper[4685]: I1202 10:22:36.442083 4685 generic.go:334] "Generic (PLEG): container finished" podID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerID="44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a" exitCode=0 Dec 02 10:22:36 crc kubenswrapper[4685]: I1202 10:22:36.442410 4685 generic.go:334] "Generic (PLEG): container finished" podID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerID="4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e" exitCode=2 Dec 02 10:22:36 crc kubenswrapper[4685]: I1202 10:22:36.442428 4685 generic.go:334] "Generic (PLEG): container finished" podID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerID="1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93" exitCode=0 Dec 02 10:22:36 crc kubenswrapper[4685]: I1202 10:22:36.442437 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b44457fc-79e0-46e1-8fc2-383578ece56f","Type":"ContainerDied","Data":"44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a"} Dec 02 10:22:36 crc kubenswrapper[4685]: I1202 10:22:36.442479 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b44457fc-79e0-46e1-8fc2-383578ece56f","Type":"ContainerDied","Data":"4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e"} Dec 02 10:22:36 crc kubenswrapper[4685]: I1202 10:22:36.442491 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b44457fc-79e0-46e1-8fc2-383578ece56f","Type":"ContainerDied","Data":"1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93"} Dec 02 10:22:38 crc kubenswrapper[4685]: I1202 10:22:38.891229 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 10:22:38 crc kubenswrapper[4685]: I1202 10:22:38.891459 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 02 10:22:38 crc kubenswrapper[4685]: I1202 10:22:38.926165 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 10:22:38 crc kubenswrapper[4685]: I1202 10:22:38.962485 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.128829 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.294411 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-sg-core-conf-yaml\") pod \"b44457fc-79e0-46e1-8fc2-383578ece56f\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.294509 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7q95z\" (UniqueName: \"kubernetes.io/projected/b44457fc-79e0-46e1-8fc2-383578ece56f-kube-api-access-7q95z\") pod \"b44457fc-79e0-46e1-8fc2-383578ece56f\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.294538 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-log-httpd\") pod \"b44457fc-79e0-46e1-8fc2-383578ece56f\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.294627 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-combined-ca-bundle\") pod \"b44457fc-79e0-46e1-8fc2-383578ece56f\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.294693 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-scripts\") pod \"b44457fc-79e0-46e1-8fc2-383578ece56f\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.294763 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-run-httpd\") pod \"b44457fc-79e0-46e1-8fc2-383578ece56f\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.294863 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-config-data\") pod \"b44457fc-79e0-46e1-8fc2-383578ece56f\" (UID: \"b44457fc-79e0-46e1-8fc2-383578ece56f\") " Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.295502 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b44457fc-79e0-46e1-8fc2-383578ece56f" (UID: "b44457fc-79e0-46e1-8fc2-383578ece56f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.295870 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b44457fc-79e0-46e1-8fc2-383578ece56f" (UID: "b44457fc-79e0-46e1-8fc2-383578ece56f"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.301090 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-scripts" (OuterVolumeSpecName: "scripts") pod "b44457fc-79e0-46e1-8fc2-383578ece56f" (UID: "b44457fc-79e0-46e1-8fc2-383578ece56f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.301146 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b44457fc-79e0-46e1-8fc2-383578ece56f-kube-api-access-7q95z" (OuterVolumeSpecName: "kube-api-access-7q95z") pod "b44457fc-79e0-46e1-8fc2-383578ece56f" (UID: "b44457fc-79e0-46e1-8fc2-383578ece56f"). InnerVolumeSpecName "kube-api-access-7q95z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.341008 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b44457fc-79e0-46e1-8fc2-383578ece56f" (UID: "b44457fc-79e0-46e1-8fc2-383578ece56f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.394401 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b44457fc-79e0-46e1-8fc2-383578ece56f" (UID: "b44457fc-79e0-46e1-8fc2-383578ece56f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.398435 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.398471 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.398484 4685 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.398496 4685 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.398508 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7q95z\" (UniqueName: \"kubernetes.io/projected/b44457fc-79e0-46e1-8fc2-383578ece56f-kube-api-access-7q95z\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.398522 4685 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b44457fc-79e0-46e1-8fc2-383578ece56f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.415554 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-config-data" (OuterVolumeSpecName: "config-data") pod "b44457fc-79e0-46e1-8fc2-383578ece56f" (UID: "b44457fc-79e0-46e1-8fc2-383578ece56f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.476659 4685 generic.go:334] "Generic (PLEG): container finished" podID="469cbb32-2f63-4f4b-813d-e07e778e0eaf" containerID="c5949b2135f1a4ff717be8568ada6c183dd79f7b288d5904b17b0246ddff2a79" exitCode=0 Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.476744 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hddhn" event={"ID":"469cbb32-2f63-4f4b-813d-e07e778e0eaf","Type":"ContainerDied","Data":"c5949b2135f1a4ff717be8568ada6c183dd79f7b288d5904b17b0246ddff2a79"} Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.480464 4685 generic.go:334] "Generic (PLEG): container finished" podID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerID="c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2" exitCode=0 Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.480509 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.480549 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b44457fc-79e0-46e1-8fc2-383578ece56f","Type":"ContainerDied","Data":"c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2"} Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.480624 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b44457fc-79e0-46e1-8fc2-383578ece56f","Type":"ContainerDied","Data":"d105738a2d1ecc37e214b3931cbef3320033a111d1a64259149a5b4f98ff56c4"} Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.480645 4685 scope.go:117] "RemoveContainer" containerID="44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.481109 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.481149 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.501090 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b44457fc-79e0-46e1-8fc2-383578ece56f-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.502016 4685 scope.go:117] "RemoveContainer" containerID="4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.529550 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.539798 4685 scope.go:117] "RemoveContainer" containerID="1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.550345 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.557835 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:39 crc kubenswrapper[4685]: E1202 10:22:39.558447 4685 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="ceilometer-notification-agent" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.558575 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="ceilometer-notification-agent" Dec 02 10:22:39 crc kubenswrapper[4685]: E1202 10:22:39.558648 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="ceilometer-central-agent" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.558710 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="ceilometer-central-agent" Dec 02 10:22:39 crc kubenswrapper[4685]: E1202 10:22:39.558774 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="sg-core" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.558824 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="sg-core" Dec 02 10:22:39 crc kubenswrapper[4685]: E1202 10:22:39.558879 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="proxy-httpd" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.558928 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="proxy-httpd" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.559186 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="ceilometer-central-agent" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.559271 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="sg-core" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.559330 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="ceilometer-notification-agent" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.559401 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" containerName="proxy-httpd" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.561200 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.565052 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.565772 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.570053 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.583638 4685 scope.go:117] "RemoveContainer" containerID="c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.617454 4685 scope.go:117] "RemoveContainer" containerID="44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a" Dec 02 10:22:39 crc kubenswrapper[4685]: E1202 10:22:39.618106 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a\": container with ID starting with 44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a not found: ID does not exist" containerID="44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.618171 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a"} err="failed to get container status \"44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a\": rpc error: code = NotFound desc = could not find container \"44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a\": container with ID starting with 44bc9d5760259e1e1840c46e0c2a49055618a4d12b90400a6019194120c2a47a not found: ID does not exist" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.618207 4685 scope.go:117] "RemoveContainer" containerID="4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e" Dec 02 10:22:39 crc kubenswrapper[4685]: E1202 10:22:39.618689 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e\": container with ID starting with 4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e not found: ID does not exist" containerID="4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.618730 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e"} err="failed to get container status \"4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e\": rpc error: code = NotFound desc = could not find container \"4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e\": container with ID starting with 4e324eea19357d6ca7e5fa440206e15321bcd4f5eb6c4c1f548fea69423c152e not found: ID does not exist" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.618755 4685 scope.go:117] "RemoveContainer" containerID="1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93" Dec 02 10:22:39 crc kubenswrapper[4685]: E1202 10:22:39.618984 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93\": container with ID starting with 1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93 not found: ID does not exist" containerID="1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.619007 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93"} err="failed to get container status \"1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93\": rpc error: code = NotFound desc = could not find container \"1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93\": container with ID starting with 1a6d17e31474149c17d31f2c43e9a46d99e9151065bf4f2bebb35b58e9b4fe93 not found: ID does not exist" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.619023 4685 scope.go:117] "RemoveContainer" containerID="c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2" Dec 02 10:22:39 crc kubenswrapper[4685]: E1202 10:22:39.619272 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2\": container with ID starting with c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2 not found: ID does not exist" containerID="c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.619299 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2"} err="failed to get container status \"c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2\": rpc error: code = NotFound desc = could not find container \"c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2\": container with ID starting with c1d2b27cffb94711bcf29b98e4d7acabbb82c2aff600f9fdcbf774d32aa29ca2 not found: ID does not exist" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.704701 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.705031 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-scripts\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.705072 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-log-httpd\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.705086 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-run-httpd\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " 
pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.705109 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.705168 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmm79\" (UniqueName: \"kubernetes.io/projected/9d080023-cf9c-41cb-a5e7-17801981562b-kube-api-access-zmm79\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.705195 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-config-data\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.807138 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.807209 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-scripts\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.807244 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-log-httpd\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.807260 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-run-httpd\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.807284 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.807343 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmm79\" (UniqueName: \"kubernetes.io/projected/9d080023-cf9c-41cb-a5e7-17801981562b-kube-api-access-zmm79\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.807368 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-config-data\") pod 
\"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.808358 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-run-httpd\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.808702 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-log-httpd\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.811285 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.811976 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-config-data\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.812397 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-scripts\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.824118 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.833458 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmm79\" (UniqueName: \"kubernetes.io/projected/9d080023-cf9c-41cb-a5e7-17801981562b-kube-api-access-zmm79\") pod \"ceilometer-0\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.891375 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:39 crc kubenswrapper[4685]: I1202 10:22:39.924452 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b44457fc-79e0-46e1-8fc2-383578ece56f" path="/var/lib/kubelet/pods/b44457fc-79e0-46e1-8fc2-383578ece56f/volumes" Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.377734 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.489553 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d080023-cf9c-41cb-a5e7-17801981562b","Type":"ContainerStarted","Data":"4dd790c087df84b4c3f3234c880d4132cfbaf8c4d6038018510e85c7696c94c6"} Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.746330 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.746625 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.811890 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.850437 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.865223 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.933064 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-config-data\") pod \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.933152 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pb7vj\" (UniqueName: \"kubernetes.io/projected/469cbb32-2f63-4f4b-813d-e07e778e0eaf-kube-api-access-pb7vj\") pod \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.933188 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-scripts\") pod \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.933211 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-combined-ca-bundle\") pod \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\" (UID: \"469cbb32-2f63-4f4b-813d-e07e778e0eaf\") " Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.951162 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-scripts" (OuterVolumeSpecName: "scripts") pod "469cbb32-2f63-4f4b-813d-e07e778e0eaf" (UID: "469cbb32-2f63-4f4b-813d-e07e778e0eaf"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.959828 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/469cbb32-2f63-4f4b-813d-e07e778e0eaf-kube-api-access-pb7vj" (OuterVolumeSpecName: "kube-api-access-pb7vj") pod "469cbb32-2f63-4f4b-813d-e07e778e0eaf" (UID: "469cbb32-2f63-4f4b-813d-e07e778e0eaf"). InnerVolumeSpecName "kube-api-access-pb7vj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.991542 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "469cbb32-2f63-4f4b-813d-e07e778e0eaf" (UID: "469cbb32-2f63-4f4b-813d-e07e778e0eaf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:40 crc kubenswrapper[4685]: I1202 10:22:40.997392 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-config-data" (OuterVolumeSpecName: "config-data") pod "469cbb32-2f63-4f4b-813d-e07e778e0eaf" (UID: "469cbb32-2f63-4f4b-813d-e07e778e0eaf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.035603 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.035631 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pb7vj\" (UniqueName: \"kubernetes.io/projected/469cbb32-2f63-4f4b-813d-e07e778e0eaf-kube-api-access-pb7vj\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.035642 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.035651 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/469cbb32-2f63-4f4b-813d-e07e778e0eaf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.499054 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hddhn" event={"ID":"469cbb32-2f63-4f4b-813d-e07e778e0eaf","Type":"ContainerDied","Data":"5b31a1c3e41680efc9afe7849f2f9fd94ac7237af905157743c8227cc73d8deb"} Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.499444 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b31a1c3e41680efc9afe7849f2f9fd94ac7237af905157743c8227cc73d8deb" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.499410 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hddhn" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.502177 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d080023-cf9c-41cb-a5e7-17801981562b","Type":"ContainerStarted","Data":"501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f"} Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.502370 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.502517 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.633867 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 10:22:41 crc kubenswrapper[4685]: E1202 10:22:41.634266 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="469cbb32-2f63-4f4b-813d-e07e778e0eaf" containerName="nova-cell0-conductor-db-sync" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.634285 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="469cbb32-2f63-4f4b-813d-e07e778e0eaf" containerName="nova-cell0-conductor-db-sync" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.634519 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="469cbb32-2f63-4f4b-813d-e07e778e0eaf" containerName="nova-cell0-conductor-db-sync" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.635154 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.637547 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.637784 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-gclvq" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.656490 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.749431 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.749530 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.749613 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnhbn\" (UniqueName: \"kubernetes.io/projected/49b02979-db28-4315-8ec3-6cad431c77ad-kube-api-access-xnhbn\") pod \"nova-cell0-conductor-0\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.848819 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/glance-default-external-api-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.848925 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.850052 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.850818 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.850922 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.851002 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnhbn\" (UniqueName: \"kubernetes.io/projected/49b02979-db28-4315-8ec3-6cad431c77ad-kube-api-access-xnhbn\") pod \"nova-cell0-conductor-0\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.857081 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.869354 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.944676 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnhbn\" (UniqueName: \"kubernetes.io/projected/49b02979-db28-4315-8ec3-6cad431c77ad-kube-api-access-xnhbn\") pod \"nova-cell0-conductor-0\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:41 crc kubenswrapper[4685]: I1202 10:22:41.984279 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:42 crc kubenswrapper[4685]: I1202 10:22:42.514792 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d080023-cf9c-41cb-a5e7-17801981562b","Type":"ContainerStarted","Data":"6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce"} Dec 02 10:22:42 crc kubenswrapper[4685]: W1202 10:22:42.516295 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49b02979_db28_4315_8ec3_6cad431c77ad.slice/crio-7554860d0f615d5dc2f69d53245abaab4460c46c4a04d3830034f97ce0c564c8 WatchSource:0}: Error finding container 7554860d0f615d5dc2f69d53245abaab4460c46c4a04d3830034f97ce0c564c8: Status 404 returned error can't find the container with id 7554860d0f615d5dc2f69d53245abaab4460c46c4a04d3830034f97ce0c564c8 Dec 02 10:22:42 crc kubenswrapper[4685]: I1202 10:22:42.523549 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 10:22:43 crc kubenswrapper[4685]: I1202 10:22:43.525140 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d080023-cf9c-41cb-a5e7-17801981562b","Type":"ContainerStarted","Data":"c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381"} Dec 02 10:22:43 crc kubenswrapper[4685]: I1202 10:22:43.528241 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:22:43 crc kubenswrapper[4685]: I1202 10:22:43.528274 4685 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 02 10:22:43 crc kubenswrapper[4685]: I1202 10:22:43.528230 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"49b02979-db28-4315-8ec3-6cad431c77ad","Type":"ContainerStarted","Data":"68feb8fb2319ec5d1db140f1c61ce064d8284266769ca9b586bf3859ad5c6861"} Dec 02 10:22:43 crc kubenswrapper[4685]: I1202 10:22:43.528527 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"49b02979-db28-4315-8ec3-6cad431c77ad","Type":"ContainerStarted","Data":"7554860d0f615d5dc2f69d53245abaab4460c46c4a04d3830034f97ce0c564c8"} Dec 02 10:22:43 crc kubenswrapper[4685]: I1202 10:22:43.551071 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.551046584 podStartE2EDuration="2.551046584s" podCreationTimestamp="2025-12-02 10:22:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:22:43.544828434 +0000 UTC m=+1255.916602588" watchObservedRunningTime="2025-12-02 10:22:43.551046584 +0000 UTC m=+1255.922820738" Dec 02 10:22:44 crc kubenswrapper[4685]: I1202 10:22:44.297668 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:44 crc kubenswrapper[4685]: I1202 10:22:44.302408 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 02 10:22:44 crc kubenswrapper[4685]: I1202 10:22:44.541691 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d080023-cf9c-41cb-a5e7-17801981562b","Type":"ContainerStarted","Data":"78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee"} Dec 02 10:22:44 crc kubenswrapper[4685]: I1202 10:22:44.541970 
4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:44 crc kubenswrapper[4685]: I1202 10:22:44.562015 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.89040141 podStartE2EDuration="5.561997635s" podCreationTimestamp="2025-12-02 10:22:39 +0000 UTC" firstStartedPulling="2025-12-02 10:22:40.38584581 +0000 UTC m=+1252.757619964" lastFinishedPulling="2025-12-02 10:22:44.057442035 +0000 UTC m=+1256.429216189" observedRunningTime="2025-12-02 10:22:44.558413017 +0000 UTC m=+1256.930187191" watchObservedRunningTime="2025-12-02 10:22:44.561997635 +0000 UTC m=+1256.933771789" Dec 02 10:22:45 crc kubenswrapper[4685]: I1202 10:22:45.179803 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 10:22:45 crc kubenswrapper[4685]: I1202 10:22:45.549485 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 10:22:46 crc kubenswrapper[4685]: I1202 10:22:46.556686 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="49b02979-db28-4315-8ec3-6cad431c77ad" containerName="nova-cell0-conductor-conductor" containerID="cri-o://68feb8fb2319ec5d1db140f1c61ce064d8284266769ca9b586bf3859ad5c6861" gracePeriod=30 Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.578517 4685 generic.go:334] "Generic (PLEG): container finished" podID="49b02979-db28-4315-8ec3-6cad431c77ad" containerID="68feb8fb2319ec5d1db140f1c61ce064d8284266769ca9b586bf3859ad5c6861" exitCode=0 Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.578860 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"49b02979-db28-4315-8ec3-6cad431c77ad","Type":"ContainerDied","Data":"68feb8fb2319ec5d1db140f1c61ce064d8284266769ca9b586bf3859ad5c6861"} Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.759309 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.759652 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="ceilometer-central-agent" containerID="cri-o://501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f" gracePeriod=30 Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.759692 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="proxy-httpd" containerID="cri-o://78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee" gracePeriod=30 Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.759824 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="sg-core" containerID="cri-o://c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381" gracePeriod=30 Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.759886 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="ceilometer-notification-agent" containerID="cri-o://6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce" gracePeriod=30 Dec 02 10:22:47 crc 
kubenswrapper[4685]: I1202 10:22:47.808727 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.976639 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-combined-ca-bundle\") pod \"49b02979-db28-4315-8ec3-6cad431c77ad\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.976696 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnhbn\" (UniqueName: \"kubernetes.io/projected/49b02979-db28-4315-8ec3-6cad431c77ad-kube-api-access-xnhbn\") pod \"49b02979-db28-4315-8ec3-6cad431c77ad\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.976733 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-config-data\") pod \"49b02979-db28-4315-8ec3-6cad431c77ad\" (UID: \"49b02979-db28-4315-8ec3-6cad431c77ad\") " Dec 02 10:22:47 crc kubenswrapper[4685]: I1202 10:22:47.989322 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49b02979-db28-4315-8ec3-6cad431c77ad-kube-api-access-xnhbn" (OuterVolumeSpecName: "kube-api-access-xnhbn") pod "49b02979-db28-4315-8ec3-6cad431c77ad" (UID: "49b02979-db28-4315-8ec3-6cad431c77ad"). InnerVolumeSpecName "kube-api-access-xnhbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.004684 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "49b02979-db28-4315-8ec3-6cad431c77ad" (UID: "49b02979-db28-4315-8ec3-6cad431c77ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.005078 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-config-data" (OuterVolumeSpecName: "config-data") pod "49b02979-db28-4315-8ec3-6cad431c77ad" (UID: "49b02979-db28-4315-8ec3-6cad431c77ad"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.083373 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnhbn\" (UniqueName: \"kubernetes.io/projected/49b02979-db28-4315-8ec3-6cad431c77ad-kube-api-access-xnhbn\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.083402 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.083412 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49b02979-db28-4315-8ec3-6cad431c77ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.589834 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"49b02979-db28-4315-8ec3-6cad431c77ad","Type":"ContainerDied","Data":"7554860d0f615d5dc2f69d53245abaab4460c46c4a04d3830034f97ce0c564c8"} Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.589896 4685 scope.go:117] "RemoveContainer" containerID="68feb8fb2319ec5d1db140f1c61ce064d8284266769ca9b586bf3859ad5c6861" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.590003 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.598654 4685 generic.go:334] "Generic (PLEG): container finished" podID="9d080023-cf9c-41cb-a5e7-17801981562b" containerID="78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee" exitCode=0 Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.599047 4685 generic.go:334] "Generic (PLEG): container finished" podID="9d080023-cf9c-41cb-a5e7-17801981562b" containerID="c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381" exitCode=2 Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.599059 4685 generic.go:334] "Generic (PLEG): container finished" podID="9d080023-cf9c-41cb-a5e7-17801981562b" containerID="6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce" exitCode=0 Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.598850 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d080023-cf9c-41cb-a5e7-17801981562b","Type":"ContainerDied","Data":"78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee"} Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.599090 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d080023-cf9c-41cb-a5e7-17801981562b","Type":"ContainerDied","Data":"c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381"} Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.599101 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d080023-cf9c-41cb-a5e7-17801981562b","Type":"ContainerDied","Data":"6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce"} Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.636550 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.647175 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 
10:22:48.682519 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 10:22:48 crc kubenswrapper[4685]: E1202 10:22:48.683978 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49b02979-db28-4315-8ec3-6cad431c77ad" containerName="nova-cell0-conductor-conductor" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.684011 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="49b02979-db28-4315-8ec3-6cad431c77ad" containerName="nova-cell0-conductor-conductor" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.684723 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="49b02979-db28-4315-8ec3-6cad431c77ad" containerName="nova-cell0-conductor-conductor" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.685717 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.692871 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.696445 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-gclvq" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.697911 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.798509 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps9n9\" (UniqueName: \"kubernetes.io/projected/587a88c2-acbc-4104-959f-8dbc52f511de-kube-api-access-ps9n9\") pod \"nova-cell0-conductor-0\" (UID: \"587a88c2-acbc-4104-959f-8dbc52f511de\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.798803 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/587a88c2-acbc-4104-959f-8dbc52f511de-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"587a88c2-acbc-4104-959f-8dbc52f511de\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.799129 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/587a88c2-acbc-4104-959f-8dbc52f511de-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"587a88c2-acbc-4104-959f-8dbc52f511de\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.900374 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps9n9\" (UniqueName: \"kubernetes.io/projected/587a88c2-acbc-4104-959f-8dbc52f511de-kube-api-access-ps9n9\") pod \"nova-cell0-conductor-0\" (UID: \"587a88c2-acbc-4104-959f-8dbc52f511de\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.900469 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/587a88c2-acbc-4104-959f-8dbc52f511de-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"587a88c2-acbc-4104-959f-8dbc52f511de\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.900514 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/587a88c2-acbc-4104-959f-8dbc52f511de-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"587a88c2-acbc-4104-959f-8dbc52f511de\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.907235 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/587a88c2-acbc-4104-959f-8dbc52f511de-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"587a88c2-acbc-4104-959f-8dbc52f511de\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.916895 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps9n9\" (UniqueName: \"kubernetes.io/projected/587a88c2-acbc-4104-959f-8dbc52f511de-kube-api-access-ps9n9\") pod \"nova-cell0-conductor-0\" (UID: \"587a88c2-acbc-4104-959f-8dbc52f511de\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:48 crc kubenswrapper[4685]: I1202 10:22:48.923545 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/587a88c2-acbc-4104-959f-8dbc52f511de-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"587a88c2-acbc-4104-959f-8dbc52f511de\") " pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:49 crc kubenswrapper[4685]: I1202 10:22:49.017511 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:49 crc kubenswrapper[4685]: I1202 10:22:49.488386 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 02 10:22:49 crc kubenswrapper[4685]: W1202 10:22:49.490986 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod587a88c2_acbc_4104_959f_8dbc52f511de.slice/crio-ef66294d1c7e2da141bad2da1106c1ae41770587759877d3521c2477c3320d6c WatchSource:0}: Error finding container ef66294d1c7e2da141bad2da1106c1ae41770587759877d3521c2477c3320d6c: Status 404 returned error can't find the container with id ef66294d1c7e2da141bad2da1106c1ae41770587759877d3521c2477c3320d6c Dec 02 10:22:49 crc kubenswrapper[4685]: I1202 10:22:49.611397 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"587a88c2-acbc-4104-959f-8dbc52f511de","Type":"ContainerStarted","Data":"ef66294d1c7e2da141bad2da1106c1ae41770587759877d3521c2477c3320d6c"} Dec 02 10:22:49 crc kubenswrapper[4685]: I1202 10:22:49.911150 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49b02979-db28-4315-8ec3-6cad431c77ad" path="/var/lib/kubelet/pods/49b02979-db28-4315-8ec3-6cad431c77ad/volumes" Dec 02 10:22:50 crc kubenswrapper[4685]: I1202 10:22:50.619905 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"587a88c2-acbc-4104-959f-8dbc52f511de","Type":"ContainerStarted","Data":"e9070fa4b2df1ca53e4e4aacea99944394970dd5c6144d5b81760f251dca0e26"} Dec 02 10:22:50 crc kubenswrapper[4685]: I1202 10:22:50.620049 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:50 crc kubenswrapper[4685]: I1202 10:22:50.637708 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.637689463 podStartE2EDuration="2.637689463s" podCreationTimestamp="2025-12-02 10:22:48 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:22:50.634598349 +0000 UTC m=+1263.006372543" watchObservedRunningTime="2025-12-02 10:22:50.637689463 +0000 UTC m=+1263.009463617" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.046099 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.134239 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.200456 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-config-data\") pod \"9d080023-cf9c-41cb-a5e7-17801981562b\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.200580 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-combined-ca-bundle\") pod \"9d080023-cf9c-41cb-a5e7-17801981562b\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.200599 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-sg-core-conf-yaml\") pod \"9d080023-cf9c-41cb-a5e7-17801981562b\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.200653 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-run-httpd\") pod \"9d080023-cf9c-41cb-a5e7-17801981562b\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.200689 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-scripts\") pod \"9d080023-cf9c-41cb-a5e7-17801981562b\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.200716 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-log-httpd\") pod \"9d080023-cf9c-41cb-a5e7-17801981562b\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.200743 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmm79\" (UniqueName: \"kubernetes.io/projected/9d080023-cf9c-41cb-a5e7-17801981562b-kube-api-access-zmm79\") pod \"9d080023-cf9c-41cb-a5e7-17801981562b\" (UID: \"9d080023-cf9c-41cb-a5e7-17801981562b\") " Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.201250 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9d080023-cf9c-41cb-a5e7-17801981562b" (UID: "9d080023-cf9c-41cb-a5e7-17801981562b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.203687 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9d080023-cf9c-41cb-a5e7-17801981562b" (UID: "9d080023-cf9c-41cb-a5e7-17801981562b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.210889 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-scripts" (OuterVolumeSpecName: "scripts") pod "9d080023-cf9c-41cb-a5e7-17801981562b" (UID: "9d080023-cf9c-41cb-a5e7-17801981562b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.225254 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d080023-cf9c-41cb-a5e7-17801981562b-kube-api-access-zmm79" (OuterVolumeSpecName: "kube-api-access-zmm79") pod "9d080023-cf9c-41cb-a5e7-17801981562b" (UID: "9d080023-cf9c-41cb-a5e7-17801981562b"). InnerVolumeSpecName "kube-api-access-zmm79". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.225491 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9d080023-cf9c-41cb-a5e7-17801981562b" (UID: "9d080023-cf9c-41cb-a5e7-17801981562b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.290815 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d080023-cf9c-41cb-a5e7-17801981562b" (UID: "9d080023-cf9c-41cb-a5e7-17801981562b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.295285 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-config-data" (OuterVolumeSpecName: "config-data") pod "9d080023-cf9c-41cb-a5e7-17801981562b" (UID: "9d080023-cf9c-41cb-a5e7-17801981562b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.303154 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.303183 4685 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.303193 4685 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.303222 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.303232 4685 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d080023-cf9c-41cb-a5e7-17801981562b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.303242 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmm79\" (UniqueName: \"kubernetes.io/projected/9d080023-cf9c-41cb-a5e7-17801981562b-kube-api-access-zmm79\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.303257 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d080023-cf9c-41cb-a5e7-17801981562b-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.528733 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-f5gg9"] Dec 02 10:22:54 crc kubenswrapper[4685]: E1202 10:22:54.529461 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="ceilometer-central-agent" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.529485 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="ceilometer-central-agent" Dec 02 10:22:54 crc kubenswrapper[4685]: E1202 10:22:54.529516 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="ceilometer-notification-agent" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.529525 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="ceilometer-notification-agent" Dec 02 10:22:54 crc kubenswrapper[4685]: E1202 10:22:54.529547 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="proxy-httpd" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.529572 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="proxy-httpd" Dec 02 10:22:54 crc kubenswrapper[4685]: E1202 10:22:54.529591 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="sg-core" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 
10:22:54.529599 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="sg-core" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.529834 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="ceilometer-notification-agent" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.529856 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="proxy-httpd" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.529876 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="ceilometer-central-agent" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.529896 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" containerName="sg-core" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.530658 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.536031 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.536093 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.552240 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-f5gg9"] Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.610711 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.611065 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-config-data\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.611372 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-scripts\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.611493 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dg4b\" (UniqueName: \"kubernetes.io/projected/42b0ad2c-7829-421a-b059-d71a19bc9c8e-kube-api-access-8dg4b\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.714115 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-scripts\") pod 
\"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.714193 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dg4b\" (UniqueName: \"kubernetes.io/projected/42b0ad2c-7829-421a-b059-d71a19bc9c8e-kube-api-access-8dg4b\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.714264 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.714327 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-config-data\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.724713 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.725390 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-scripts\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.725496 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-config-data\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.739595 4685 generic.go:334] "Generic (PLEG): container finished" podID="9d080023-cf9c-41cb-a5e7-17801981562b" containerID="501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f" exitCode=0 Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.739656 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d080023-cf9c-41cb-a5e7-17801981562b","Type":"ContainerDied","Data":"501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f"} Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.739688 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d080023-cf9c-41cb-a5e7-17801981562b","Type":"ContainerDied","Data":"4dd790c087df84b4c3f3234c880d4132cfbaf8c4d6038018510e85c7696c94c6"} Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.739709 4685 scope.go:117] "RemoveContainer" containerID="78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.739931 4685 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.747950 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dg4b\" (UniqueName: \"kubernetes.io/projected/42b0ad2c-7829-421a-b059-d71a19bc9c8e-kube-api-access-8dg4b\") pod \"nova-cell0-cell-mapping-f5gg9\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.751330 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.772748 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.783325 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.820676 4685 scope.go:117] "RemoveContainer" containerID="c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.831741 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.831853 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kpsz\" (UniqueName: \"kubernetes.io/projected/27f9b9ce-2044-491c-8172-3afec1a54bd8-kube-api-access-7kpsz\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.831896 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27f9b9ce-2044-491c-8172-3afec1a54bd8-logs\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.832103 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-config-data\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.863028 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.890234 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.932118 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.947735 4685 scope.go:117] "RemoveContainer" containerID="6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.948669 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.948721 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kpsz\" (UniqueName: \"kubernetes.io/projected/27f9b9ce-2044-491c-8172-3afec1a54bd8-kube-api-access-7kpsz\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.948742 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27f9b9ce-2044-491c-8172-3afec1a54bd8-logs\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.948819 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-config-data\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.948960 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.962532 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.962738 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27f9b9ce-2044-491c-8172-3afec1a54bd8-logs\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.963451 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 02 10:22:54 crc kubenswrapper[4685]: I1202 10:22:54.964863 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-config-data\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.008227 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.023850 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.052724 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.052865 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhlwv\" (UniqueName: \"kubernetes.io/projected/ef382cd5-f660-46a5-865c-7e39daec2a01-kube-api-access-zhlwv\") pod \"nova-cell1-novncproxy-0\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.052954 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.054676 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.120688 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kpsz\" (UniqueName: \"kubernetes.io/projected/27f9b9ce-2044-491c-8172-3afec1a54bd8-kube-api-access-7kpsz\") pod \"nova-api-0\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " pod="openstack/nova-api-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.136950 4685 scope.go:117] "RemoveContainer" containerID="501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.142209 4685 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.154525 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.154881 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhlwv\" (UniqueName: \"kubernetes.io/projected/ef382cd5-f660-46a5-865c-7e39daec2a01-kube-api-access-zhlwv\") pod \"nova-cell1-novncproxy-0\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.155032 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.158701 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.162628 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.173289 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.174063 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.181143 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.183323 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.191933 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.199370 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.203025 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.234600 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhlwv\" (UniqueName: \"kubernetes.io/projected/ef382cd5-f660-46a5-865c-7e39daec2a01-kube-api-access-zhlwv\") pod \"nova-cell1-novncproxy-0\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.243639 4685 scope.go:117] "RemoveContainer" containerID="78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee" Dec 02 10:22:55 crc kubenswrapper[4685]: E1202 10:22:55.249819 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee\": container with ID starting with 78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee not found: ID does not exist" containerID="78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.249864 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee"} err="failed to get container status \"78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee\": rpc error: code = NotFound desc = could not find container \"78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee\": container with ID starting with 78b6e08fa1054ae579f3db4a17c8be2b769b49b5670781b6f1fb0d422674faee not found: ID does not exist" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.249894 4685 scope.go:117] "RemoveContainer" containerID="c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381" Dec 02 10:22:55 crc kubenswrapper[4685]: E1202 10:22:55.257687 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381\": container with ID starting with c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381 not found: ID does not exist" containerID="c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.257740 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381"} err="failed to get container status \"c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381\": rpc error: code = NotFound desc = could not find container \"c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381\": container with ID starting with c0b8a2b1bceae7d033ab90e54dfeac2c58e5d9be26d33458e3004c762d223381 not found: ID does not exist" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.257774 4685 scope.go:117] "RemoveContainer" 
containerID="6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.258939 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p79cz\" (UniqueName: \"kubernetes.io/projected/e664f4ff-9501-444a-a04f-f65c8c99af93-kube-api-access-p79cz\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.259014 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-scripts\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.259070 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-config-data\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.259093 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-config-data\") pod \"nova-scheduler-0\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.259144 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.259188 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwclq\" (UniqueName: \"kubernetes.io/projected/ec96f8d4-a19d-4937-a546-5285f49d3dbd-kube-api-access-cwclq\") pod \"nova-scheduler-0\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.259203 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.259240 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-log-httpd\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.259264 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-run-httpd\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.259312 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.260093 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:55 crc kubenswrapper[4685]: E1202 10:22:55.261055 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce\": container with ID starting with 6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce not found: ID does not exist" containerID="6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.261197 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce"} err="failed to get container status \"6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce\": rpc error: code = NotFound desc = could not find container \"6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce\": container with ID starting with 6cacee588ab7fef5bbb8e67078b63569d27f535b4391e2811d5fcceb512aa8ce not found: ID does not exist" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.261314 4685 scope.go:117] "RemoveContainer" containerID="501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f" Dec 02 10:22:55 crc kubenswrapper[4685]: E1202 10:22:55.265817 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f\": container with ID starting with 501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f not found: ID does not exist" containerID="501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.266080 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f"} err="failed to get container status \"501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f\": rpc error: code = NotFound desc = could not find container \"501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f\": container with ID starting with 501c9d81ccdcaa5bd2e5fe7fa2c0a7cb8e9122c3375bb2f55a6a5abcb1bb617f not found: ID does not exist" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.281645 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.283861 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.290355 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.301109 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.365054 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.365931 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-run-httpd\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.365984 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366033 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366065 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p79cz\" (UniqueName: \"kubernetes.io/projected/e664f4ff-9501-444a-a04f-f65c8c99af93-kube-api-access-p79cz\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366101 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-config-data\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366135 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf9tf\" (UniqueName: \"kubernetes.io/projected/aa5d0b3b-488f-44f8-90d3-8cd077f40686-kube-api-access-tf9tf\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366160 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-scripts\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366181 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-config-data\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366205 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-config-data\") pod \"nova-scheduler-0\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366261 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366318 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwclq\" (UniqueName: \"kubernetes.io/projected/ec96f8d4-a19d-4937-a546-5285f49d3dbd-kube-api-access-cwclq\") pod \"nova-scheduler-0\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366340 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366376 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-log-httpd\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.366404 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa5d0b3b-488f-44f8-90d3-8cd077f40686-logs\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.367408 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-run-httpd\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.371416 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-log-httpd\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.376406 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.381278 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-config-data\") pod \"nova-scheduler-0\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.387912 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.389506 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.405479 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-scripts\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.399797 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-24s8m"] Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.420715 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p79cz\" (UniqueName: \"kubernetes.io/projected/e664f4ff-9501-444a-a04f-f65c8c99af93-kube-api-access-p79cz\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.422031 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-24s8m"] Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.422115 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.426609 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-config-data\") pod \"ceilometer-0\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.428125 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwclq\" (UniqueName: \"kubernetes.io/projected/ec96f8d4-a19d-4937-a546-5285f49d3dbd-kube-api-access-cwclq\") pod \"nova-scheduler-0\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.441744 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.469970 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-config-data\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.470012 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.470044 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf9tf\" (UniqueName: \"kubernetes.io/projected/aa5d0b3b-488f-44f8-90d3-8cd077f40686-kube-api-access-tf9tf\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.470089 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5s5w8\" (UniqueName: \"kubernetes.io/projected/a083ca66-9f25-4f06-85a1-f37986775b73-kube-api-access-5s5w8\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.470107 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-config\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.470154 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-svc\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.470184 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa5d0b3b-488f-44f8-90d3-8cd077f40686-logs\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.470210 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.470246 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc 
kubenswrapper[4685]: I1202 10:22:55.470274 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.473254 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa5d0b3b-488f-44f8-90d3-8cd077f40686-logs\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.474003 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-config-data\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.478662 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.491087 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf9tf\" (UniqueName: \"kubernetes.io/projected/aa5d0b3b-488f-44f8-90d3-8cd077f40686-kube-api-access-tf9tf\") pod \"nova-metadata-0\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.513058 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.545934 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.571481 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.571551 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.571634 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.571698 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5s5w8\" (UniqueName: \"kubernetes.io/projected/a083ca66-9f25-4f06-85a1-f37986775b73-kube-api-access-5s5w8\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.571725 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-config\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.571789 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-svc\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.572825 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-svc\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.573414 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.574035 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc 
kubenswrapper[4685]: I1202 10:22:55.574683 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.575532 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-config\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.600784 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5s5w8\" (UniqueName: \"kubernetes.io/projected/a083ca66-9f25-4f06-85a1-f37986775b73-kube-api-access-5s5w8\") pod \"dnsmasq-dns-757b4f8459-24s8m\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.622080 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.783333 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.943810 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d080023-cf9c-41cb-a5e7-17801981562b" path="/var/lib/kubelet/pods/9d080023-cf9c-41cb-a5e7-17801981562b/volumes" Dec 02 10:22:55 crc kubenswrapper[4685]: I1202 10:22:55.948427 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-f5gg9"] Dec 02 10:22:55 crc kubenswrapper[4685]: W1202 10:22:55.956063 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42b0ad2c_7829_421a_b059_d71a19bc9c8e.slice/crio-8b54105a263208157e333d26b4f88b082719bcd13b404cce12124e9eeda216f3 WatchSource:0}: Error finding container 8b54105a263208157e333d26b4f88b082719bcd13b404cce12124e9eeda216f3: Status 404 returned error can't find the container with id 8b54105a263208157e333d26b4f88b082719bcd13b404cce12124e9eeda216f3 Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.176740 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.361734 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.371948 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.569959 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:22:56 crc kubenswrapper[4685]: W1202 10:22:56.584350 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa5d0b3b_488f_44f8_90d3_8cd077f40686.slice/crio-aa4ed3edeff2561698d3351c327d52ab265009e2d680e627c3b93d77de7ea73e WatchSource:0}: Error finding container aa4ed3edeff2561698d3351c327d52ab265009e2d680e627c3b93d77de7ea73e: Status 404 returned error can't find the container with id 
aa4ed3edeff2561698d3351c327d52ab265009e2d680e627c3b93d77de7ea73e Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.633218 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4lsm9"] Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.639835 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.644808 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.645222 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.654759 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.679416 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4lsm9"] Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.710658 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-24s8m"] Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.735713 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6vkb\" (UniqueName: \"kubernetes.io/projected/6bb811fb-5566-4c40-a281-0ea15a5360eb-kube-api-access-q6vkb\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.735758 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.735801 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-config-data\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.735821 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-scripts\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.809499 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27f9b9ce-2044-491c-8172-3afec1a54bd8","Type":"ContainerStarted","Data":"63ca359d6fc493ddc4565030b4249df94d771a7ad2db62ccc3be15184bee058f"} Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.813710 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e664f4ff-9501-444a-a04f-f65c8c99af93","Type":"ContainerStarted","Data":"99f0554291c6e52251a7af4bbe619263e415674b1fce88397eacd2a9531e2c70"} Dec 02 10:22:56 crc 
kubenswrapper[4685]: I1202 10:22:56.816611 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" event={"ID":"a083ca66-9f25-4f06-85a1-f37986775b73","Type":"ContainerStarted","Data":"60887ea3b0f17d6b23b601f8b3aa5a692be6f50d588d7ad6a2fbea78352af349"} Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.819012 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ec96f8d4-a19d-4937-a546-5285f49d3dbd","Type":"ContainerStarted","Data":"ce2e75823a421ca4724a73121248498c0ec0dc78c522b4146513055c49f1acbe"} Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.820364 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f5gg9" event={"ID":"42b0ad2c-7829-421a-b059-d71a19bc9c8e","Type":"ContainerStarted","Data":"0bf04bc3cada1cc2799f41365167ff3c538bf369911c4da160e6810380e1072e"} Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.820384 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f5gg9" event={"ID":"42b0ad2c-7829-421a-b059-d71a19bc9c8e","Type":"ContainerStarted","Data":"8b54105a263208157e333d26b4f88b082719bcd13b404cce12124e9eeda216f3"} Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.822357 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ef382cd5-f660-46a5-865c-7e39daec2a01","Type":"ContainerStarted","Data":"13e761485bf834926c9695ac8457f096fa6f9902d6958cfc4c7ad6376c250862"} Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.823176 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"aa5d0b3b-488f-44f8-90d3-8cd077f40686","Type":"ContainerStarted","Data":"aa4ed3edeff2561698d3351c327d52ab265009e2d680e627c3b93d77de7ea73e"} Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.838305 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6vkb\" (UniqueName: \"kubernetes.io/projected/6bb811fb-5566-4c40-a281-0ea15a5360eb-kube-api-access-q6vkb\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.838688 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.838767 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-config-data\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.839063 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-scripts\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.844603 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-config-data\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.848851 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-f5gg9" podStartSLOduration=2.848833528 podStartE2EDuration="2.848833528s" podCreationTimestamp="2025-12-02 10:22:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:22:56.8397535 +0000 UTC m=+1269.211527654" watchObservedRunningTime="2025-12-02 10:22:56.848833528 +0000 UTC m=+1269.220607682" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.859441 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-scripts\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.863248 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.871768 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6vkb\" (UniqueName: \"kubernetes.io/projected/6bb811fb-5566-4c40-a281-0ea15a5360eb-kube-api-access-q6vkb\") pod \"nova-cell1-conductor-db-sync-4lsm9\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:56 crc kubenswrapper[4685]: I1202 10:22:56.979783 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:22:57 crc kubenswrapper[4685]: I1202 10:22:57.595031 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4lsm9"] Dec 02 10:22:57 crc kubenswrapper[4685]: W1202 10:22:57.606144 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bb811fb_5566_4c40_a281_0ea15a5360eb.slice/crio-90dbc04e1460c0ce6d702ec376f9ae183e0d3a5aca93c8112ab206c05c466e2e WatchSource:0}: Error finding container 90dbc04e1460c0ce6d702ec376f9ae183e0d3a5aca93c8112ab206c05c466e2e: Status 404 returned error can't find the container with id 90dbc04e1460c0ce6d702ec376f9ae183e0d3a5aca93c8112ab206c05c466e2e Dec 02 10:22:57 crc kubenswrapper[4685]: I1202 10:22:57.847386 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e664f4ff-9501-444a-a04f-f65c8c99af93","Type":"ContainerStarted","Data":"fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af"} Dec 02 10:22:57 crc kubenswrapper[4685]: I1202 10:22:57.849897 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4lsm9" event={"ID":"6bb811fb-5566-4c40-a281-0ea15a5360eb","Type":"ContainerStarted","Data":"90dbc04e1460c0ce6d702ec376f9ae183e0d3a5aca93c8112ab206c05c466e2e"} Dec 02 10:22:57 crc kubenswrapper[4685]: I1202 10:22:57.861130 4685 generic.go:334] "Generic (PLEG): container finished" podID="a083ca66-9f25-4f06-85a1-f37986775b73" containerID="0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980" exitCode=0 Dec 02 10:22:57 crc kubenswrapper[4685]: I1202 10:22:57.861455 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" event={"ID":"a083ca66-9f25-4f06-85a1-f37986775b73","Type":"ContainerDied","Data":"0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980"} Dec 02 10:22:58 crc kubenswrapper[4685]: I1202 10:22:58.871870 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e664f4ff-9501-444a-a04f-f65c8c99af93","Type":"ContainerStarted","Data":"1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60"} Dec 02 10:22:58 crc kubenswrapper[4685]: I1202 10:22:58.874211 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4lsm9" event={"ID":"6bb811fb-5566-4c40-a281-0ea15a5360eb","Type":"ContainerStarted","Data":"cf8fd53219a24490a374951ef74c8bc7e33a0dac2e5726fac117ce27f551922e"} Dec 02 10:22:58 crc kubenswrapper[4685]: I1202 10:22:58.877252 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" event={"ID":"a083ca66-9f25-4f06-85a1-f37986775b73","Type":"ContainerStarted","Data":"105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde"} Dec 02 10:22:58 crc kubenswrapper[4685]: I1202 10:22:58.877714 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:22:58 crc kubenswrapper[4685]: I1202 10:22:58.928980 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" podStartSLOduration=3.928963644 podStartE2EDuration="3.928963644s" podCreationTimestamp="2025-12-02 10:22:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:22:58.927490883 +0000 UTC 
m=+1271.299265037" watchObservedRunningTime="2025-12-02 10:22:58.928963644 +0000 UTC m=+1271.300737798" Dec 02 10:22:58 crc kubenswrapper[4685]: I1202 10:22:58.929170 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-4lsm9" podStartSLOduration=2.9291637 podStartE2EDuration="2.9291637s" podCreationTimestamp="2025-12-02 10:22:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:22:58.903934249 +0000 UTC m=+1271.275708403" watchObservedRunningTime="2025-12-02 10:22:58.9291637 +0000 UTC m=+1271.300937854" Dec 02 10:22:59 crc kubenswrapper[4685]: I1202 10:22:59.027135 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 10:22:59 crc kubenswrapper[4685]: I1202 10:22:59.056233 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:01 crc kubenswrapper[4685]: I1202 10:23:01.931389 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e664f4ff-9501-444a-a04f-f65c8c99af93","Type":"ContainerStarted","Data":"04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881"} Dec 02 10:23:01 crc kubenswrapper[4685]: I1202 10:23:01.938749 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ec96f8d4-a19d-4937-a546-5285f49d3dbd","Type":"ContainerStarted","Data":"8d025602d0f655f35af5f8006b2f92b0fd1b657039b4fe8711fce61a8f5d9ef6"} Dec 02 10:23:01 crc kubenswrapper[4685]: I1202 10:23:01.945600 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"aa5d0b3b-488f-44f8-90d3-8cd077f40686","Type":"ContainerStarted","Data":"f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2"} Dec 02 10:23:01 crc kubenswrapper[4685]: I1202 10:23:01.946612 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ef382cd5-f660-46a5-865c-7e39daec2a01","Type":"ContainerStarted","Data":"f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353"} Dec 02 10:23:01 crc kubenswrapper[4685]: I1202 10:23:01.946728 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="ef382cd5-f660-46a5-865c-7e39daec2a01" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353" gracePeriod=30 Dec 02 10:23:01 crc kubenswrapper[4685]: I1202 10:23:01.951949 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27f9b9ce-2044-491c-8172-3afec1a54bd8","Type":"ContainerStarted","Data":"613f5ffa7e422a8664aa2af1c475f263350da5c4aed0da930a02e1ed068492e8"} Dec 02 10:23:01 crc kubenswrapper[4685]: I1202 10:23:01.963311 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.139467544 podStartE2EDuration="7.963288537s" podCreationTimestamp="2025-12-02 10:22:54 +0000 UTC" firstStartedPulling="2025-12-02 10:22:56.36881199 +0000 UTC m=+1268.740586144" lastFinishedPulling="2025-12-02 10:23:01.192632983 +0000 UTC m=+1273.564407137" observedRunningTime="2025-12-02 10:23:01.95831161 +0000 UTC m=+1274.330085774" watchObservedRunningTime="2025-12-02 10:23:01.963288537 +0000 UTC m=+1274.335062691" Dec 02 10:23:01 crc kubenswrapper[4685]: I1202 10:23:01.983874 4685 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.155887883 podStartE2EDuration="7.983851129s" podCreationTimestamp="2025-12-02 10:22:54 +0000 UTC" firstStartedPulling="2025-12-02 10:22:56.363141665 +0000 UTC m=+1268.734915819" lastFinishedPulling="2025-12-02 10:23:01.191104911 +0000 UTC m=+1273.562879065" observedRunningTime="2025-12-02 10:23:01.976302263 +0000 UTC m=+1274.348076417" watchObservedRunningTime="2025-12-02 10:23:01.983851129 +0000 UTC m=+1274.355625283" Dec 02 10:23:02 crc kubenswrapper[4685]: I1202 10:23:02.964710 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"aa5d0b3b-488f-44f8-90d3-8cd077f40686","Type":"ContainerStarted","Data":"57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338"} Dec 02 10:23:02 crc kubenswrapper[4685]: I1202 10:23:02.964790 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" containerName="nova-metadata-log" containerID="cri-o://f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2" gracePeriod=30 Dec 02 10:23:02 crc kubenswrapper[4685]: I1202 10:23:02.964861 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" containerName="nova-metadata-metadata" containerID="cri-o://57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338" gracePeriod=30 Dec 02 10:23:02 crc kubenswrapper[4685]: I1202 10:23:02.975946 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27f9b9ce-2044-491c-8172-3afec1a54bd8","Type":"ContainerStarted","Data":"f70b8e5af6ad12afb267f5352e6f3bec675be1fff59058bf53f79faa685ba78b"} Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.003541 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.406953006 podStartE2EDuration="8.003520169s" podCreationTimestamp="2025-12-02 10:22:55 +0000 UTC" firstStartedPulling="2025-12-02 10:22:56.597455518 +0000 UTC m=+1268.969229672" lastFinishedPulling="2025-12-02 10:23:01.194022681 +0000 UTC m=+1273.565796835" observedRunningTime="2025-12-02 10:23:02.992652531 +0000 UTC m=+1275.364426685" watchObservedRunningTime="2025-12-02 10:23:03.003520169 +0000 UTC m=+1275.375294323" Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.028250 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.044444163 podStartE2EDuration="9.028234085s" podCreationTimestamp="2025-12-02 10:22:54 +0000 UTC" firstStartedPulling="2025-12-02 10:22:56.168722633 +0000 UTC m=+1268.540496787" lastFinishedPulling="2025-12-02 10:23:01.152512555 +0000 UTC m=+1273.524286709" observedRunningTime="2025-12-02 10:23:03.025608693 +0000 UTC m=+1275.397382847" watchObservedRunningTime="2025-12-02 10:23:03.028234085 +0000 UTC m=+1275.400008239" Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.795929 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.907135 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa5d0b3b-488f-44f8-90d3-8cd077f40686-logs\") pod \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.907194 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-config-data\") pod \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.907230 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-combined-ca-bundle\") pod \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.907267 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tf9tf\" (UniqueName: \"kubernetes.io/projected/aa5d0b3b-488f-44f8-90d3-8cd077f40686-kube-api-access-tf9tf\") pod \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\" (UID: \"aa5d0b3b-488f-44f8-90d3-8cd077f40686\") " Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.907687 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa5d0b3b-488f-44f8-90d3-8cd077f40686-logs" (OuterVolumeSpecName: "logs") pod "aa5d0b3b-488f-44f8-90d3-8cd077f40686" (UID: "aa5d0b3b-488f-44f8-90d3-8cd077f40686"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.935820 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa5d0b3b-488f-44f8-90d3-8cd077f40686-kube-api-access-tf9tf" (OuterVolumeSpecName: "kube-api-access-tf9tf") pod "aa5d0b3b-488f-44f8-90d3-8cd077f40686" (UID: "aa5d0b3b-488f-44f8-90d3-8cd077f40686"). InnerVolumeSpecName "kube-api-access-tf9tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.950414 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-config-data" (OuterVolumeSpecName: "config-data") pod "aa5d0b3b-488f-44f8-90d3-8cd077f40686" (UID: "aa5d0b3b-488f-44f8-90d3-8cd077f40686"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.968245 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa5d0b3b-488f-44f8-90d3-8cd077f40686" (UID: "aa5d0b3b-488f-44f8-90d3-8cd077f40686"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.984772 4685 generic.go:334] "Generic (PLEG): container finished" podID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" containerID="57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338" exitCode=0 Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.984804 4685 generic.go:334] "Generic (PLEG): container finished" podID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" containerID="f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2" exitCode=143 Dec 02 10:23:03 crc kubenswrapper[4685]: I1202 10:23:03.984932 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.009091 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa5d0b3b-488f-44f8-90d3-8cd077f40686-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.009125 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.009134 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa5d0b3b-488f-44f8-90d3-8cd077f40686-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.009144 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tf9tf\" (UniqueName: \"kubernetes.io/projected/aa5d0b3b-488f-44f8-90d3-8cd077f40686-kube-api-access-tf9tf\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.017910 4685 kubelet_pods.go:2476] "Failed to reduce cpu time for pod pending volume cleanup" podUID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" err="openat2 /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa5d0b3b_488f_44f8_90d3_8cd077f40686.slice/cgroup.controllers: no such file or directory" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.017972 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"aa5d0b3b-488f-44f8-90d3-8cd077f40686","Type":"ContainerDied","Data":"57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338"} Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.018005 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.018015 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"aa5d0b3b-488f-44f8-90d3-8cd077f40686","Type":"ContainerDied","Data":"f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2"} Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.018049 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"aa5d0b3b-488f-44f8-90d3-8cd077f40686","Type":"ContainerDied","Data":"aa4ed3edeff2561698d3351c327d52ab265009e2d680e627c3b93d77de7ea73e"} Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.018078 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e664f4ff-9501-444a-a04f-f65c8c99af93","Type":"ContainerStarted","Data":"13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114"} Dec 02 
10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.018141 4685 scope.go:117] "RemoveContainer" containerID="57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.024363 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.749658354 podStartE2EDuration="10.02434614s" podCreationTimestamp="2025-12-02 10:22:54 +0000 UTC" firstStartedPulling="2025-12-02 10:22:56.655570038 +0000 UTC m=+1269.027344192" lastFinishedPulling="2025-12-02 10:23:02.930257824 +0000 UTC m=+1275.302031978" observedRunningTime="2025-12-02 10:23:04.018967413 +0000 UTC m=+1276.390741567" watchObservedRunningTime="2025-12-02 10:23:04.02434614 +0000 UTC m=+1276.396120294" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.054603 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.064797 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.075346 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:04 crc kubenswrapper[4685]: E1202 10:23:04.075783 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" containerName="nova-metadata-metadata" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.075802 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" containerName="nova-metadata-metadata" Dec 02 10:23:04 crc kubenswrapper[4685]: E1202 10:23:04.075847 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" containerName="nova-metadata-log" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.075853 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" containerName="nova-metadata-log" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.076053 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" containerName="nova-metadata-metadata" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.076112 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" containerName="nova-metadata-log" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.078300 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.080873 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.081052 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.090704 4685 scope.go:117] "RemoveContainer" containerID="f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.094404 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.121289 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.121373 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.121427 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc58f14c-cdb0-4649-8838-fa307d5ca801-logs\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.121475 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-config-data\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.121527 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zg6q\" (UniqueName: \"kubernetes.io/projected/cc58f14c-cdb0-4649-8838-fa307d5ca801-kube-api-access-9zg6q\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.151637 4685 scope.go:117] "RemoveContainer" containerID="57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338" Dec 02 10:23:04 crc kubenswrapper[4685]: E1202 10:23:04.155814 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338\": container with ID starting with 57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338 not found: ID does not exist" containerID="57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.155867 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338"} err="failed to get container 
status \"57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338\": rpc error: code = NotFound desc = could not find container \"57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338\": container with ID starting with 57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338 not found: ID does not exist" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.155896 4685 scope.go:117] "RemoveContainer" containerID="f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2" Dec 02 10:23:04 crc kubenswrapper[4685]: E1202 10:23:04.156357 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2\": container with ID starting with f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2 not found: ID does not exist" containerID="f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.156374 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2"} err="failed to get container status \"f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2\": rpc error: code = NotFound desc = could not find container \"f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2\": container with ID starting with f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2 not found: ID does not exist" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.156385 4685 scope.go:117] "RemoveContainer" containerID="57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.156675 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338"} err="failed to get container status \"57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338\": rpc error: code = NotFound desc = could not find container \"57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338\": container with ID starting with 57b5385148db4231bb2c8d6f01b0982f4234ee3de51068611a1edaceb8662338 not found: ID does not exist" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.156727 4685 scope.go:117] "RemoveContainer" containerID="f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.157018 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2"} err="failed to get container status \"f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2\": rpc error: code = NotFound desc = could not find container \"f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2\": container with ID starting with f4ebbbdc3fb6cc3947095a17ee76a5d5608b590816d27da364ba05605b8aa0b2 not found: ID does not exist" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.222915 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc58f14c-cdb0-4649-8838-fa307d5ca801-logs\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.222992 4685 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-config-data\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.223041 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zg6q\" (UniqueName: \"kubernetes.io/projected/cc58f14c-cdb0-4649-8838-fa307d5ca801-kube-api-access-9zg6q\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.223086 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.223138 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.225104 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc58f14c-cdb0-4649-8838-fa307d5ca801-logs\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.229512 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.230151 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-config-data\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.231269 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.244153 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zg6q\" (UniqueName: \"kubernetes.io/projected/cc58f14c-cdb0-4649-8838-fa307d5ca801-kube-api-access-9zg6q\") pod \"nova-metadata-0\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.411381 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:04 crc kubenswrapper[4685]: I1202 10:23:04.876765 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:04 crc kubenswrapper[4685]: W1202 10:23:04.885061 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc58f14c_cdb0_4649_8838_fa307d5ca801.slice/crio-c103f1e8ce4f6db599773572dfc7eed2603ec164592cebbb0db462e0f1f2aedd WatchSource:0}: Error finding container c103f1e8ce4f6db599773572dfc7eed2603ec164592cebbb0db462e0f1f2aedd: Status 404 returned error can't find the container with id c103f1e8ce4f6db599773572dfc7eed2603ec164592cebbb0db462e0f1f2aedd Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.008232 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc58f14c-cdb0-4649-8838-fa307d5ca801","Type":"ContainerStarted","Data":"c103f1e8ce4f6db599773572dfc7eed2603ec164592cebbb0db462e0f1f2aedd"} Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.365856 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.365928 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.442625 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.514650 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.514700 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.632835 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.788462 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.928451 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa5d0b3b-488f-44f8-90d3-8cd077f40686" path="/var/lib/kubelet/pods/aa5d0b3b-488f-44f8-90d3-8cd077f40686/volumes" Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.929585 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-q9z92"] Dec 02 10:23:05 crc kubenswrapper[4685]: I1202 10:23:05.929819 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" podUID="9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" containerName="dnsmasq-dns" containerID="cri-o://2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6" gracePeriod=10 Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.050407 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc58f14c-cdb0-4649-8838-fa307d5ca801","Type":"ContainerStarted","Data":"7cc8cae29b5a5ab5aee01cb7fd21265361c1582c3425ebea00a1f527a25a6905"} Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.050457 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"cc58f14c-cdb0-4649-8838-fa307d5ca801","Type":"ContainerStarted","Data":"26b9627eed2192c35b7f016bea9daa68af8247242ddcdb79318cb2b1b5ab7c2f"} Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.217930 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.262910 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.262890842 podStartE2EDuration="2.262890842s" podCreationTimestamp="2025-12-02 10:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:23:06.0998564 +0000 UTC m=+1278.471630554" watchObservedRunningTime="2025-12-02 10:23:06.262890842 +0000 UTC m=+1278.634664996" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.450857 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.183:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.451275 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.183:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.675726 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.811524 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-sb\") pod \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.811668 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4s5d\" (UniqueName: \"kubernetes.io/projected/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-kube-api-access-v4s5d\") pod \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.811716 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-config\") pod \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.811735 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-swift-storage-0\") pod \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.811765 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-nb\") pod \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\" (UID: 
\"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.811840 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-svc\") pod \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\" (UID: \"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc\") " Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.838886 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-kube-api-access-v4s5d" (OuterVolumeSpecName: "kube-api-access-v4s5d") pod "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" (UID: "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc"). InnerVolumeSpecName "kube-api-access-v4s5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.889502 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-config" (OuterVolumeSpecName: "config") pod "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" (UID: "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.913931 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4s5d\" (UniqueName: \"kubernetes.io/projected/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-kube-api-access-v4s5d\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.913970 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.914169 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" (UID: "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.916594 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" (UID: "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.951058 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" (UID: "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:23:06 crc kubenswrapper[4685]: I1202 10:23:06.981952 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" (UID: "9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.015813 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.015859 4685 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.015909 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.015934 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.059331 4685 generic.go:334] "Generic (PLEG): container finished" podID="9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" containerID="2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6" exitCode=0 Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.059385 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" event={"ID":"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc","Type":"ContainerDied","Data":"2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6"} Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.059411 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" event={"ID":"9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc","Type":"ContainerDied","Data":"a1a64337d89352a0b90296221c226772b284833d2848be8c698b9706f2163825"} Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.059427 4685 scope.go:117] "RemoveContainer" containerID="2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.059520 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-q9z92" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.065275 4685 generic.go:334] "Generic (PLEG): container finished" podID="42b0ad2c-7829-421a-b059-d71a19bc9c8e" containerID="0bf04bc3cada1cc2799f41365167ff3c538bf369911c4da160e6810380e1072e" exitCode=0 Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.066286 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f5gg9" event={"ID":"42b0ad2c-7829-421a-b059-d71a19bc9c8e","Type":"ContainerDied","Data":"0bf04bc3cada1cc2799f41365167ff3c538bf369911c4da160e6810380e1072e"} Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.090506 4685 scope.go:117] "RemoveContainer" containerID="194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.116722 4685 scope.go:117] "RemoveContainer" containerID="2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6" Dec 02 10:23:07 crc kubenswrapper[4685]: E1202 10:23:07.117232 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6\": container with ID starting with 2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6 not found: ID does not exist" containerID="2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.117262 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6"} err="failed to get container status \"2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6\": rpc error: code = NotFound desc = could not find container \"2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6\": container with ID starting with 2f6996c02d5190dc22cec08b568160d478f97b623eedea2a806391b9a6a942a6 not found: ID does not exist" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.117284 4685 scope.go:117] "RemoveContainer" containerID="194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5" Dec 02 10:23:07 crc kubenswrapper[4685]: E1202 10:23:07.117457 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5\": container with ID starting with 194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5 not found: ID does not exist" containerID="194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.117483 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5"} err="failed to get container status \"194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5\": rpc error: code = NotFound desc = could not find container \"194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5\": container with ID starting with 194de67217c1b82b254347fbd5bbd4308b67b2e9014174e41d931854d86234e5 not found: ID does not exist" Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.163649 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-q9z92"] Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.174483 4685 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-q9z92"] Dec 02 10:23:07 crc kubenswrapper[4685]: I1202 10:23:07.914730 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" path="/var/lib/kubelet/pods/9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc/volumes" Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.668754 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.753285 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-scripts\") pod \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.753347 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-config-data\") pod \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.753484 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dg4b\" (UniqueName: \"kubernetes.io/projected/42b0ad2c-7829-421a-b059-d71a19bc9c8e-kube-api-access-8dg4b\") pod \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.753568 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-combined-ca-bundle\") pod \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\" (UID: \"42b0ad2c-7829-421a-b059-d71a19bc9c8e\") " Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.768123 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-scripts" (OuterVolumeSpecName: "scripts") pod "42b0ad2c-7829-421a-b059-d71a19bc9c8e" (UID: "42b0ad2c-7829-421a-b059-d71a19bc9c8e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.779098 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42b0ad2c-7829-421a-b059-d71a19bc9c8e-kube-api-access-8dg4b" (OuterVolumeSpecName: "kube-api-access-8dg4b") pod "42b0ad2c-7829-421a-b059-d71a19bc9c8e" (UID: "42b0ad2c-7829-421a-b059-d71a19bc9c8e"). InnerVolumeSpecName "kube-api-access-8dg4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.786826 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "42b0ad2c-7829-421a-b059-d71a19bc9c8e" (UID: "42b0ad2c-7829-421a-b059-d71a19bc9c8e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.794009 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-config-data" (OuterVolumeSpecName: "config-data") pod "42b0ad2c-7829-421a-b059-d71a19bc9c8e" (UID: "42b0ad2c-7829-421a-b059-d71a19bc9c8e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.856077 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dg4b\" (UniqueName: \"kubernetes.io/projected/42b0ad2c-7829-421a-b059-d71a19bc9c8e-kube-api-access-8dg4b\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.856114 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.856125 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:08 crc kubenswrapper[4685]: I1202 10:23:08.856133 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42b0ad2c-7829-421a-b059-d71a19bc9c8e-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.084475 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-f5gg9" event={"ID":"42b0ad2c-7829-421a-b059-d71a19bc9c8e","Type":"ContainerDied","Data":"8b54105a263208157e333d26b4f88b082719bcd13b404cce12124e9eeda216f3"} Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.084512 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-f5gg9" Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.084518 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b54105a263208157e333d26b4f88b082719bcd13b404cce12124e9eeda216f3" Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.296271 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.296485 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="ec96f8d4-a19d-4937-a546-5285f49d3dbd" containerName="nova-scheduler-scheduler" containerID="cri-o://8d025602d0f655f35af5f8006b2f92b0fd1b657039b4fe8711fce61a8f5d9ef6" gracePeriod=30 Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.318639 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.318930 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerName="nova-api-log" containerID="cri-o://613f5ffa7e422a8664aa2af1c475f263350da5c4aed0da930a02e1ed068492e8" gracePeriod=30 Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.319244 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerName="nova-api-api" containerID="cri-o://f70b8e5af6ad12afb267f5352e6f3bec675be1fff59058bf53f79faa685ba78b" gracePeriod=30 Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.351748 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.352214 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cc58f14c-cdb0-4649-8838-fa307d5ca801" containerName="nova-metadata-metadata" containerID="cri-o://7cc8cae29b5a5ab5aee01cb7fd21265361c1582c3425ebea00a1f527a25a6905" gracePeriod=30 Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.352078 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cc58f14c-cdb0-4649-8838-fa307d5ca801" containerName="nova-metadata-log" containerID="cri-o://26b9627eed2192c35b7f016bea9daa68af8247242ddcdb79318cb2b1b5ab7c2f" gracePeriod=30 Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.411990 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 10:23:09 crc kubenswrapper[4685]: I1202 10:23:09.412035 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.100662 4685 generic.go:334] "Generic (PLEG): container finished" podID="ec96f8d4-a19d-4937-a546-5285f49d3dbd" containerID="8d025602d0f655f35af5f8006b2f92b0fd1b657039b4fe8711fce61a8f5d9ef6" exitCode=0 Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.101018 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ec96f8d4-a19d-4937-a546-5285f49d3dbd","Type":"ContainerDied","Data":"8d025602d0f655f35af5f8006b2f92b0fd1b657039b4fe8711fce61a8f5d9ef6"} Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.102739 4685 generic.go:334] "Generic (PLEG): container finished" podID="cc58f14c-cdb0-4649-8838-fa307d5ca801" 
containerID="7cc8cae29b5a5ab5aee01cb7fd21265361c1582c3425ebea00a1f527a25a6905" exitCode=0 Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.102764 4685 generic.go:334] "Generic (PLEG): container finished" podID="cc58f14c-cdb0-4649-8838-fa307d5ca801" containerID="26b9627eed2192c35b7f016bea9daa68af8247242ddcdb79318cb2b1b5ab7c2f" exitCode=143 Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.102802 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc58f14c-cdb0-4649-8838-fa307d5ca801","Type":"ContainerDied","Data":"7cc8cae29b5a5ab5aee01cb7fd21265361c1582c3425ebea00a1f527a25a6905"} Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.102823 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc58f14c-cdb0-4649-8838-fa307d5ca801","Type":"ContainerDied","Data":"26b9627eed2192c35b7f016bea9daa68af8247242ddcdb79318cb2b1b5ab7c2f"} Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.112997 4685 generic.go:334] "Generic (PLEG): container finished" podID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerID="613f5ffa7e422a8664aa2af1c475f263350da5c4aed0da930a02e1ed068492e8" exitCode=143 Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.113270 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27f9b9ce-2044-491c-8172-3afec1a54bd8","Type":"ContainerDied","Data":"613f5ffa7e422a8664aa2af1c475f263350da5c4aed0da930a02e1ed068492e8"} Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.115064 4685 generic.go:334] "Generic (PLEG): container finished" podID="6bb811fb-5566-4c40-a281-0ea15a5360eb" containerID="cf8fd53219a24490a374951ef74c8bc7e33a0dac2e5726fac117ce27f551922e" exitCode=0 Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.115092 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4lsm9" event={"ID":"6bb811fb-5566-4c40-a281-0ea15a5360eb","Type":"ContainerDied","Data":"cf8fd53219a24490a374951ef74c8bc7e33a0dac2e5726fac117ce27f551922e"} Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.292677 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.397213 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-nova-metadata-tls-certs\") pod \"cc58f14c-cdb0-4649-8838-fa307d5ca801\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.397499 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc58f14c-cdb0-4649-8838-fa307d5ca801-logs\") pod \"cc58f14c-cdb0-4649-8838-fa307d5ca801\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.397663 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zg6q\" (UniqueName: \"kubernetes.io/projected/cc58f14c-cdb0-4649-8838-fa307d5ca801-kube-api-access-9zg6q\") pod \"cc58f14c-cdb0-4649-8838-fa307d5ca801\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.397863 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-config-data\") pod \"cc58f14c-cdb0-4649-8838-fa307d5ca801\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.397938 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc58f14c-cdb0-4649-8838-fa307d5ca801-logs" (OuterVolumeSpecName: "logs") pod "cc58f14c-cdb0-4649-8838-fa307d5ca801" (UID: "cc58f14c-cdb0-4649-8838-fa307d5ca801"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.398141 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-combined-ca-bundle\") pod \"cc58f14c-cdb0-4649-8838-fa307d5ca801\" (UID: \"cc58f14c-cdb0-4649-8838-fa307d5ca801\") " Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.398765 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc58f14c-cdb0-4649-8838-fa307d5ca801-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.405245 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc58f14c-cdb0-4649-8838-fa307d5ca801-kube-api-access-9zg6q" (OuterVolumeSpecName: "kube-api-access-9zg6q") pod "cc58f14c-cdb0-4649-8838-fa307d5ca801" (UID: "cc58f14c-cdb0-4649-8838-fa307d5ca801"). InnerVolumeSpecName "kube-api-access-9zg6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.460670 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "cc58f14c-cdb0-4649-8838-fa307d5ca801" (UID: "cc58f14c-cdb0-4649-8838-fa307d5ca801"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.463669 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc58f14c-cdb0-4649-8838-fa307d5ca801" (UID: "cc58f14c-cdb0-4649-8838-fa307d5ca801"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.471068 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.472802 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-config-data" (OuterVolumeSpecName: "config-data") pod "cc58f14c-cdb0-4649-8838-fa307d5ca801" (UID: "cc58f14c-cdb0-4649-8838-fa307d5ca801"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.500187 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zg6q\" (UniqueName: \"kubernetes.io/projected/cc58f14c-cdb0-4649-8838-fa307d5ca801-kube-api-access-9zg6q\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.500422 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.500488 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.500575 4685 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc58f14c-cdb0-4649-8838-fa307d5ca801-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.602274 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwclq\" (UniqueName: \"kubernetes.io/projected/ec96f8d4-a19d-4937-a546-5285f49d3dbd-kube-api-access-cwclq\") pod \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.602462 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-combined-ca-bundle\") pod \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.602518 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-config-data\") pod \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\" (UID: \"ec96f8d4-a19d-4937-a546-5285f49d3dbd\") " Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.609702 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec96f8d4-a19d-4937-a546-5285f49d3dbd-kube-api-access-cwclq" (OuterVolumeSpecName: "kube-api-access-cwclq") pod 
"ec96f8d4-a19d-4937-a546-5285f49d3dbd" (UID: "ec96f8d4-a19d-4937-a546-5285f49d3dbd"). InnerVolumeSpecName "kube-api-access-cwclq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.632669 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-config-data" (OuterVolumeSpecName: "config-data") pod "ec96f8d4-a19d-4937-a546-5285f49d3dbd" (UID: "ec96f8d4-a19d-4937-a546-5285f49d3dbd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.640079 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec96f8d4-a19d-4937-a546-5285f49d3dbd" (UID: "ec96f8d4-a19d-4937-a546-5285f49d3dbd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.705057 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.705097 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec96f8d4-a19d-4937-a546-5285f49d3dbd-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:10 crc kubenswrapper[4685]: I1202 10:23:10.705107 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwclq\" (UniqueName: \"kubernetes.io/projected/ec96f8d4-a19d-4937-a546-5285f49d3dbd-kube-api-access-cwclq\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.126997 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.128305 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ec96f8d4-a19d-4937-a546-5285f49d3dbd","Type":"ContainerDied","Data":"ce2e75823a421ca4724a73121248498c0ec0dc78c522b4146513055c49f1acbe"} Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.129487 4685 scope.go:117] "RemoveContainer" containerID="8d025602d0f655f35af5f8006b2f92b0fd1b657039b4fe8711fce61a8f5d9ef6" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.130299 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cc58f14c-cdb0-4649-8838-fa307d5ca801","Type":"ContainerDied","Data":"c103f1e8ce4f6db599773572dfc7eed2603ec164592cebbb0db462e0f1f2aedd"} Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.130318 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.182776 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.187215 4685 scope.go:117] "RemoveContainer" containerID="7cc8cae29b5a5ab5aee01cb7fd21265361c1582c3425ebea00a1f527a25a6905" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.206923 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.219438 4685 scope.go:117] "RemoveContainer" containerID="26b9627eed2192c35b7f016bea9daa68af8247242ddcdb79318cb2b1b5ab7c2f" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.228181 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.240714 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264104 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:11 crc kubenswrapper[4685]: E1202 10:23:11.264538 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" containerName="dnsmasq-dns" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264551 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" containerName="dnsmasq-dns" Dec 02 10:23:11 crc kubenswrapper[4685]: E1202 10:23:11.264580 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42b0ad2c-7829-421a-b059-d71a19bc9c8e" containerName="nova-manage" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264587 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="42b0ad2c-7829-421a-b059-d71a19bc9c8e" containerName="nova-manage" Dec 02 10:23:11 crc kubenswrapper[4685]: E1202 10:23:11.264602 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc58f14c-cdb0-4649-8838-fa307d5ca801" containerName="nova-metadata-log" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264609 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc58f14c-cdb0-4649-8838-fa307d5ca801" containerName="nova-metadata-log" Dec 02 10:23:11 crc kubenswrapper[4685]: E1202 10:23:11.264626 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc58f14c-cdb0-4649-8838-fa307d5ca801" containerName="nova-metadata-metadata" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264632 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc58f14c-cdb0-4649-8838-fa307d5ca801" containerName="nova-metadata-metadata" Dec 02 10:23:11 crc kubenswrapper[4685]: E1202 10:23:11.264644 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" containerName="init" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264650 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" containerName="init" Dec 02 10:23:11 crc kubenswrapper[4685]: E1202 10:23:11.264678 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec96f8d4-a19d-4937-a546-5285f49d3dbd" containerName="nova-scheduler-scheduler" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264685 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec96f8d4-a19d-4937-a546-5285f49d3dbd" containerName="nova-scheduler-scheduler" Dec 
02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264866 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc58f14c-cdb0-4649-8838-fa307d5ca801" containerName="nova-metadata-log" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264883 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc58f14c-cdb0-4649-8838-fa307d5ca801" containerName="nova-metadata-metadata" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264894 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c751a52-1597-4bbf-b9d7-79e8bcf0f9dc" containerName="dnsmasq-dns" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264910 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec96f8d4-a19d-4937-a546-5285f49d3dbd" containerName="nova-scheduler-scheduler" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.264923 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="42b0ad2c-7829-421a-b059-d71a19bc9c8e" containerName="nova-manage" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.266032 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.278796 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.279094 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.291001 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.299722 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.303934 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.315477 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-config-data\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.315549 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75218bc5-a1d9-463d-8837-7b36d60738d5-logs\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.315606 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddslb\" (UniqueName: \"kubernetes.io/projected/75218bc5-a1d9-463d-8837-7b36d60738d5-kube-api-access-ddslb\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.315685 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.315728 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.334614 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.355652 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.417458 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.417504 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.417539 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-config-data\") pod \"nova-scheduler-0\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " 
pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.417613 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.417644 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-config-data\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.417791 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hbpj\" (UniqueName: \"kubernetes.io/projected/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-kube-api-access-2hbpj\") pod \"nova-scheduler-0\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.417869 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75218bc5-a1d9-463d-8837-7b36d60738d5-logs\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.417938 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddslb\" (UniqueName: \"kubernetes.io/projected/75218bc5-a1d9-463d-8837-7b36d60738d5-kube-api-access-ddslb\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.418974 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75218bc5-a1d9-463d-8837-7b36d60738d5-logs\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.422197 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.423553 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-config-data\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.426935 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.447189 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddslb\" (UniqueName: 
\"kubernetes.io/projected/75218bc5-a1d9-463d-8837-7b36d60738d5-kube-api-access-ddslb\") pod \"nova-metadata-0\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.519797 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-config-data\") pod \"nova-scheduler-0\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.519876 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.519924 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hbpj\" (UniqueName: \"kubernetes.io/projected/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-kube-api-access-2hbpj\") pod \"nova-scheduler-0\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.525100 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-config-data\") pod \"nova-scheduler-0\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.526052 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.537520 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hbpj\" (UniqueName: \"kubernetes.io/projected/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-kube-api-access-2hbpj\") pod \"nova-scheduler-0\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.599618 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.628109 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.650064 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.723282 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6vkb\" (UniqueName: \"kubernetes.io/projected/6bb811fb-5566-4c40-a281-0ea15a5360eb-kube-api-access-q6vkb\") pod \"6bb811fb-5566-4c40-a281-0ea15a5360eb\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.723478 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-combined-ca-bundle\") pod \"6bb811fb-5566-4c40-a281-0ea15a5360eb\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.723714 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-config-data\") pod \"6bb811fb-5566-4c40-a281-0ea15a5360eb\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.724060 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-scripts\") pod \"6bb811fb-5566-4c40-a281-0ea15a5360eb\" (UID: \"6bb811fb-5566-4c40-a281-0ea15a5360eb\") " Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.746016 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bb811fb-5566-4c40-a281-0ea15a5360eb-kube-api-access-q6vkb" (OuterVolumeSpecName: "kube-api-access-q6vkb") pod "6bb811fb-5566-4c40-a281-0ea15a5360eb" (UID: "6bb811fb-5566-4c40-a281-0ea15a5360eb"). InnerVolumeSpecName "kube-api-access-q6vkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.753711 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-scripts" (OuterVolumeSpecName: "scripts") pod "6bb811fb-5566-4c40-a281-0ea15a5360eb" (UID: "6bb811fb-5566-4c40-a281-0ea15a5360eb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.756755 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-config-data" (OuterVolumeSpecName: "config-data") pod "6bb811fb-5566-4c40-a281-0ea15a5360eb" (UID: "6bb811fb-5566-4c40-a281-0ea15a5360eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.824486 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6bb811fb-5566-4c40-a281-0ea15a5360eb" (UID: "6bb811fb-5566-4c40-a281-0ea15a5360eb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.832100 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.832128 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6vkb\" (UniqueName: \"kubernetes.io/projected/6bb811fb-5566-4c40-a281-0ea15a5360eb-kube-api-access-q6vkb\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.832139 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.832147 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb811fb-5566-4c40-a281-0ea15a5360eb-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.937292 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc58f14c-cdb0-4649-8838-fa307d5ca801" path="/var/lib/kubelet/pods/cc58f14c-cdb0-4649-8838-fa307d5ca801/volumes" Dec 02 10:23:11 crc kubenswrapper[4685]: I1202 10:23:11.938046 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec96f8d4-a19d-4937-a546-5285f49d3dbd" path="/var/lib/kubelet/pods/ec96f8d4-a19d-4937-a546-5285f49d3dbd/volumes" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.155393 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.158524 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.177269 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4lsm9" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.177888 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4lsm9" event={"ID":"6bb811fb-5566-4c40-a281-0ea15a5360eb","Type":"ContainerDied","Data":"90dbc04e1460c0ce6d702ec376f9ae183e0d3a5aca93c8112ab206c05c466e2e"} Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.178095 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90dbc04e1460c0ce6d702ec376f9ae183e0d3a5aca93c8112ab206c05c466e2e" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.221820 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 02 10:23:12 crc kubenswrapper[4685]: E1202 10:23:12.222238 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bb811fb-5566-4c40-a281-0ea15a5360eb" containerName="nova-cell1-conductor-db-sync" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.222258 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bb811fb-5566-4c40-a281-0ea15a5360eb" containerName="nova-cell1-conductor-db-sync" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.222498 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bb811fb-5566-4c40-a281-0ea15a5360eb" containerName="nova-cell1-conductor-db-sync" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.223147 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.230789 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.248748 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d403a63-e543-4bc9-8f38-daaee1ceb4e6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6d403a63-e543-4bc9-8f38-daaee1ceb4e6\") " pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.248797 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rj42\" (UniqueName: \"kubernetes.io/projected/6d403a63-e543-4bc9-8f38-daaee1ceb4e6-kube-api-access-6rj42\") pod \"nova-cell1-conductor-0\" (UID: \"6d403a63-e543-4bc9-8f38-daaee1ceb4e6\") " pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.248834 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d403a63-e543-4bc9-8f38-daaee1ceb4e6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6d403a63-e543-4bc9-8f38-daaee1ceb4e6\") " pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.287724 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.350340 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d403a63-e543-4bc9-8f38-daaee1ceb4e6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6d403a63-e543-4bc9-8f38-daaee1ceb4e6\") " pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.350388 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rj42\" (UniqueName: \"kubernetes.io/projected/6d403a63-e543-4bc9-8f38-daaee1ceb4e6-kube-api-access-6rj42\") pod \"nova-cell1-conductor-0\" (UID: \"6d403a63-e543-4bc9-8f38-daaee1ceb4e6\") " pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.350425 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d403a63-e543-4bc9-8f38-daaee1ceb4e6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6d403a63-e543-4bc9-8f38-daaee1ceb4e6\") " pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.356365 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d403a63-e543-4bc9-8f38-daaee1ceb4e6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6d403a63-e543-4bc9-8f38-daaee1ceb4e6\") " pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.357385 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d403a63-e543-4bc9-8f38-daaee1ceb4e6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6d403a63-e543-4bc9-8f38-daaee1ceb4e6\") " pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.373753 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rj42\" (UniqueName: \"kubernetes.io/projected/6d403a63-e543-4bc9-8f38-daaee1ceb4e6-kube-api-access-6rj42\") pod \"nova-cell1-conductor-0\" (UID: \"6d403a63-e543-4bc9-8f38-daaee1ceb4e6\") " pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.502315 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.561814 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:12 crc kubenswrapper[4685]: I1202 10:23:12.621827 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.058714 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.217656 4685 generic.go:334] "Generic (PLEG): container finished" podID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerID="f70b8e5af6ad12afb267f5352e6f3bec675be1fff59058bf53f79faa685ba78b" exitCode=0 Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.217680 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27f9b9ce-2044-491c-8172-3afec1a54bd8","Type":"ContainerDied","Data":"f70b8e5af6ad12afb267f5352e6f3bec675be1fff59058bf53f79faa685ba78b"} Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.220623 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75218bc5-a1d9-463d-8837-7b36d60738d5","Type":"ContainerStarted","Data":"f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f"} Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.220684 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75218bc5-a1d9-463d-8837-7b36d60738d5","Type":"ContainerStarted","Data":"94eeb92cc92f3c7984f7b6828d8082592ec1f7440e2923a3ad8907ec58d8ee70"} Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.223287 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"afc109bc-598b-4aa5-b7d6-1dac9593c3b8","Type":"ContainerStarted","Data":"84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89"} Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.223328 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"afc109bc-598b-4aa5-b7d6-1dac9593c3b8","Type":"ContainerStarted","Data":"b61a9a475a90c18d6d7e1343cb55b3eb1367f7ea4aacf774999b8318f17c4f55"} Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.230739 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6d403a63-e543-4bc9-8f38-daaee1ceb4e6","Type":"ContainerStarted","Data":"78cc0d792b804e95e430d7a4008944d625b2dc83904020224b4179771197eee9"} Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.258730 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.2587012140000002 podStartE2EDuration="2.258701214s" podCreationTimestamp="2025-12-02 10:23:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:23:13.242374657 +0000 UTC m=+1285.614148841" watchObservedRunningTime="2025-12-02 10:23:13.258701214 +0000 UTC m=+1285.630475378" Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.326650 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.412248 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-combined-ca-bundle\") pod \"27f9b9ce-2044-491c-8172-3afec1a54bd8\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.412371 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-config-data\") pod \"27f9b9ce-2044-491c-8172-3afec1a54bd8\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.412405 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kpsz\" (UniqueName: \"kubernetes.io/projected/27f9b9ce-2044-491c-8172-3afec1a54bd8-kube-api-access-7kpsz\") pod \"27f9b9ce-2044-491c-8172-3afec1a54bd8\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.412430 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27f9b9ce-2044-491c-8172-3afec1a54bd8-logs\") pod \"27f9b9ce-2044-491c-8172-3afec1a54bd8\" (UID: \"27f9b9ce-2044-491c-8172-3afec1a54bd8\") " Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.413132 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27f9b9ce-2044-491c-8172-3afec1a54bd8-logs" (OuterVolumeSpecName: "logs") pod "27f9b9ce-2044-491c-8172-3afec1a54bd8" (UID: "27f9b9ce-2044-491c-8172-3afec1a54bd8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.425850 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27f9b9ce-2044-491c-8172-3afec1a54bd8-kube-api-access-7kpsz" (OuterVolumeSpecName: "kube-api-access-7kpsz") pod "27f9b9ce-2044-491c-8172-3afec1a54bd8" (UID: "27f9b9ce-2044-491c-8172-3afec1a54bd8"). InnerVolumeSpecName "kube-api-access-7kpsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.466880 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27f9b9ce-2044-491c-8172-3afec1a54bd8" (UID: "27f9b9ce-2044-491c-8172-3afec1a54bd8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.473852 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-config-data" (OuterVolumeSpecName: "config-data") pod "27f9b9ce-2044-491c-8172-3afec1a54bd8" (UID: "27f9b9ce-2044-491c-8172-3afec1a54bd8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.514110 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.514146 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27f9b9ce-2044-491c-8172-3afec1a54bd8-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.514158 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kpsz\" (UniqueName: \"kubernetes.io/projected/27f9b9ce-2044-491c-8172-3afec1a54bd8-kube-api-access-7kpsz\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:13 crc kubenswrapper[4685]: I1202 10:23:13.514170 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27f9b9ce-2044-491c-8172-3afec1a54bd8-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.241257 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75218bc5-a1d9-463d-8837-7b36d60738d5","Type":"ContainerStarted","Data":"5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673"} Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.242919 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6d403a63-e543-4bc9-8f38-daaee1ceb4e6","Type":"ContainerStarted","Data":"6744a3b15a5bbfacf4f9507483cf6a1e19d019449564582700fb9e90f4adca11"} Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.243005 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.244951 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"27f9b9ce-2044-491c-8172-3afec1a54bd8","Type":"ContainerDied","Data":"63ca359d6fc493ddc4565030b4249df94d771a7ad2db62ccc3be15184bee058f"} Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.245028 4685 scope.go:117] "RemoveContainer" containerID="f70b8e5af6ad12afb267f5352e6f3bec675be1fff59058bf53f79faa685ba78b" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.244974 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.266039 4685 scope.go:117] "RemoveContainer" containerID="613f5ffa7e422a8664aa2af1c475f263350da5c4aed0da930a02e1ed068492e8" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.273621 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.273597803 podStartE2EDuration="3.273597803s" podCreationTimestamp="2025-12-02 10:23:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:23:14.271856546 +0000 UTC m=+1286.643630730" watchObservedRunningTime="2025-12-02 10:23:14.273597803 +0000 UTC m=+1286.645371957" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.306870 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.306845603 podStartE2EDuration="2.306845603s" podCreationTimestamp="2025-12-02 10:23:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:23:14.297624081 +0000 UTC m=+1286.669398245" watchObservedRunningTime="2025-12-02 10:23:14.306845603 +0000 UTC m=+1286.678619767" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.354937 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.379526 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.416655 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:14 crc kubenswrapper[4685]: E1202 10:23:14.417178 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerName="nova-api-api" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.417221 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerName="nova-api-api" Dec 02 10:23:14 crc kubenswrapper[4685]: E1202 10:23:14.417241 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerName="nova-api-log" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.417248 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerName="nova-api-log" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.417447 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerName="nova-api-log" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.417463 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" containerName="nova-api-api" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.418650 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.421007 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.435464 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.549148 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-config-data\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.549199 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6073eed1-f054-4ea2-bd72-407c3d293cb0-logs\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.549244 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.549315 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5xvw\" (UniqueName: \"kubernetes.io/projected/6073eed1-f054-4ea2-bd72-407c3d293cb0-kube-api-access-l5xvw\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.651345 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5xvw\" (UniqueName: \"kubernetes.io/projected/6073eed1-f054-4ea2-bd72-407c3d293cb0-kube-api-access-l5xvw\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.651483 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-config-data\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.651519 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6073eed1-f054-4ea2-bd72-407c3d293cb0-logs\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.651601 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.652275 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6073eed1-f054-4ea2-bd72-407c3d293cb0-logs\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " 
pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.657778 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-config-data\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.666522 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.670524 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5xvw\" (UniqueName: \"kubernetes.io/projected/6073eed1-f054-4ea2-bd72-407c3d293cb0-kube-api-access-l5xvw\") pod \"nova-api-0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " pod="openstack/nova-api-0" Dec 02 10:23:14 crc kubenswrapper[4685]: I1202 10:23:14.751143 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:23:15 crc kubenswrapper[4685]: I1202 10:23:15.199700 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:15 crc kubenswrapper[4685]: I1202 10:23:15.268606 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6073eed1-f054-4ea2-bd72-407c3d293cb0","Type":"ContainerStarted","Data":"c1b3a8480bfe09ba6ba0d6eb09ae818f4ac3e579b81074e759cbfced8a625ac8"} Dec 02 10:23:15 crc kubenswrapper[4685]: I1202 10:23:15.912529 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27f9b9ce-2044-491c-8172-3afec1a54bd8" path="/var/lib/kubelet/pods/27f9b9ce-2044-491c-8172-3afec1a54bd8/volumes" Dec 02 10:23:16 crc kubenswrapper[4685]: I1202 10:23:16.283305 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6073eed1-f054-4ea2-bd72-407c3d293cb0","Type":"ContainerStarted","Data":"f41557685d432cff4014f1f83ec88d77ea8800ec31c63b65ffa90d6e529c757b"} Dec 02 10:23:16 crc kubenswrapper[4685]: I1202 10:23:16.283354 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6073eed1-f054-4ea2-bd72-407c3d293cb0","Type":"ContainerStarted","Data":"2ada764676d1be843b43c08edae03ce096a6b0a6376f02f4b44302e200114587"} Dec 02 10:23:16 crc kubenswrapper[4685]: I1202 10:23:16.309049 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.309025395 podStartE2EDuration="2.309025395s" podCreationTimestamp="2025-12-02 10:23:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:23:16.304495241 +0000 UTC m=+1288.676269395" watchObservedRunningTime="2025-12-02 10:23:16.309025395 +0000 UTC m=+1288.680799549" Dec 02 10:23:16 crc kubenswrapper[4685]: I1202 10:23:16.600715 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 10:23:16 crc kubenswrapper[4685]: I1202 10:23:16.600798 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 10:23:16 crc kubenswrapper[4685]: I1202 10:23:16.629608 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-scheduler-0" Dec 02 10:23:21 crc kubenswrapper[4685]: I1202 10:23:21.600394 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 10:23:21 crc kubenswrapper[4685]: I1202 10:23:21.601518 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 10:23:21 crc kubenswrapper[4685]: I1202 10:23:21.630193 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 02 10:23:21 crc kubenswrapper[4685]: I1202 10:23:21.672352 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 02 10:23:22 crc kubenswrapper[4685]: I1202 10:23:22.388678 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 10:23:22 crc kubenswrapper[4685]: I1202 10:23:22.593384 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 02 10:23:22 crc kubenswrapper[4685]: I1202 10:23:22.618771 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 10:23:22 crc kubenswrapper[4685]: I1202 10:23:22.618822 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 10:23:24 crc kubenswrapper[4685]: I1202 10:23:24.751597 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 10:23:24 crc kubenswrapper[4685]: I1202 10:23:24.751665 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 10:23:25 crc kubenswrapper[4685]: I1202 10:23:25.558918 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 02 10:23:25 crc kubenswrapper[4685]: I1202 10:23:25.834847 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 10:23:25 crc kubenswrapper[4685]: I1202 10:23:25.835219 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 02 10:23:29 crc kubenswrapper[4685]: I1202 10:23:29.537805 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 10:23:29 crc kubenswrapper[4685]: I1202 10:23:29.538336 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="886b7e36-bc06-45fe-a10d-3840c1f68d24" containerName="kube-state-metrics" containerID="cri-o://fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6" gracePeriod=30 Dec 02 
10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.007288 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.076170 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz5mm\" (UniqueName: \"kubernetes.io/projected/886b7e36-bc06-45fe-a10d-3840c1f68d24-kube-api-access-lz5mm\") pod \"886b7e36-bc06-45fe-a10d-3840c1f68d24\" (UID: \"886b7e36-bc06-45fe-a10d-3840c1f68d24\") " Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.082340 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/886b7e36-bc06-45fe-a10d-3840c1f68d24-kube-api-access-lz5mm" (OuterVolumeSpecName: "kube-api-access-lz5mm") pod "886b7e36-bc06-45fe-a10d-3840c1f68d24" (UID: "886b7e36-bc06-45fe-a10d-3840c1f68d24"). InnerVolumeSpecName "kube-api-access-lz5mm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.178265 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz5mm\" (UniqueName: \"kubernetes.io/projected/886b7e36-bc06-45fe-a10d-3840c1f68d24-kube-api-access-lz5mm\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.442329 4685 generic.go:334] "Generic (PLEG): container finished" podID="886b7e36-bc06-45fe-a10d-3840c1f68d24" containerID="fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6" exitCode=2 Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.442389 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.442385 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"886b7e36-bc06-45fe-a10d-3840c1f68d24","Type":"ContainerDied","Data":"fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6"} Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.442782 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"886b7e36-bc06-45fe-a10d-3840c1f68d24","Type":"ContainerDied","Data":"bea7a533f33baee701fe00774e682fc8942cfd688dfd8c28e3c19a137e2188f8"} Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.442810 4685 scope.go:117] "RemoveContainer" containerID="fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.482752 4685 scope.go:117] "RemoveContainer" containerID="fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.482876 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 10:23:30 crc kubenswrapper[4685]: E1202 10:23:30.483270 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6\": container with ID starting with fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6 not found: ID does not exist" containerID="fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.483411 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6"} 
err="failed to get container status \"fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6\": rpc error: code = NotFound desc = could not find container \"fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6\": container with ID starting with fae2b9fd1dc13be0c45be9d08d6af2e97bc145839686f117aac22f87bb42fcf6 not found: ID does not exist" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.498597 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.507547 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 10:23:30 crc kubenswrapper[4685]: E1202 10:23:30.508286 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="886b7e36-bc06-45fe-a10d-3840c1f68d24" containerName="kube-state-metrics" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.508362 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="886b7e36-bc06-45fe-a10d-3840c1f68d24" containerName="kube-state-metrics" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.508637 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="886b7e36-bc06-45fe-a10d-3840c1f68d24" containerName="kube-state-metrics" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.509465 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.515893 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.516802 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.526174 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.585463 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.585554 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mqhn\" (UniqueName: \"kubernetes.io/projected/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-kube-api-access-2mqhn\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.585776 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.585893 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: 
\"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.687951 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.688026 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.688125 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.688187 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mqhn\" (UniqueName: \"kubernetes.io/projected/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-kube-api-access-2mqhn\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.695705 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.697150 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.699113 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.706842 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mqhn\" (UniqueName: \"kubernetes.io/projected/23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c-kube-api-access-2mqhn\") pod \"kube-state-metrics-0\" (UID: \"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c\") " pod="openstack/kube-state-metrics-0" Dec 02 10:23:30 crc kubenswrapper[4685]: I1202 10:23:30.829647 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.334864 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.453511 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c","Type":"ContainerStarted","Data":"1f498fcf0817eda3eb832c89ed8159c5f2a25a82c06c29f8af369ca56ee74d2d"} Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.607967 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.609388 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="ceilometer-central-agent" containerID="cri-o://fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af" gracePeriod=30 Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.609640 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="proxy-httpd" containerID="cri-o://13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114" gracePeriod=30 Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.609738 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="ceilometer-notification-agent" containerID="cri-o://1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60" gracePeriod=30 Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.609738 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="sg-core" containerID="cri-o://04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881" gracePeriod=30 Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.636232 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.637902 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.664377 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 10:23:31 crc kubenswrapper[4685]: I1202 10:23:31.920761 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="886b7e36-bc06-45fe-a10d-3840c1f68d24" path="/var/lib/kubelet/pods/886b7e36-bc06-45fe-a10d-3840c1f68d24/volumes" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.428251 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.479140 4685 generic.go:334] "Generic (PLEG): container finished" podID="ef382cd5-f660-46a5-865c-7e39daec2a01" containerID="f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353" exitCode=137 Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.479205 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ef382cd5-f660-46a5-865c-7e39daec2a01","Type":"ContainerDied","Data":"f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353"} Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.479232 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ef382cd5-f660-46a5-865c-7e39daec2a01","Type":"ContainerDied","Data":"13e761485bf834926c9695ac8457f096fa6f9902d6958cfc4c7ad6376c250862"} Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.479247 4685 scope.go:117] "RemoveContainer" containerID="f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.479363 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.503110 4685 generic.go:334] "Generic (PLEG): container finished" podID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerID="13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114" exitCode=0 Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.503148 4685 generic.go:334] "Generic (PLEG): container finished" podID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerID="04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881" exitCode=2 Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.503157 4685 generic.go:334] "Generic (PLEG): container finished" podID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerID="fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af" exitCode=0 Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.503206 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e664f4ff-9501-444a-a04f-f65c8c99af93","Type":"ContainerDied","Data":"13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114"} Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.503236 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e664f4ff-9501-444a-a04f-f65c8c99af93","Type":"ContainerDied","Data":"04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881"} Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.503248 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e664f4ff-9501-444a-a04f-f65c8c99af93","Type":"ContainerDied","Data":"fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af"} Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.513774 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c","Type":"ContainerStarted","Data":"9deb1952dba496c1714bb3555db366e0c7d8c8f4493606bbdd6df75bcdba0c22"} Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.513998 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.521119 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-config-data\") pod \"ef382cd5-f660-46a5-865c-7e39daec2a01\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.521172 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-combined-ca-bundle\") pod \"ef382cd5-f660-46a5-865c-7e39daec2a01\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.521209 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhlwv\" (UniqueName: \"kubernetes.io/projected/ef382cd5-f660-46a5-865c-7e39daec2a01-kube-api-access-zhlwv\") pod \"ef382cd5-f660-46a5-865c-7e39daec2a01\" (UID: \"ef382cd5-f660-46a5-865c-7e39daec2a01\") " Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.537367 4685 scope.go:117] "RemoveContainer" containerID="f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.538241 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef382cd5-f660-46a5-865c-7e39daec2a01-kube-api-access-zhlwv" (OuterVolumeSpecName: "kube-api-access-zhlwv") pod "ef382cd5-f660-46a5-865c-7e39daec2a01" (UID: "ef382cd5-f660-46a5-865c-7e39daec2a01"). InnerVolumeSpecName "kube-api-access-zhlwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:32 crc kubenswrapper[4685]: E1202 10:23:32.538661 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353\": container with ID starting with f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353 not found: ID does not exist" containerID="f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.538691 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353"} err="failed to get container status \"f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353\": rpc error: code = NotFound desc = could not find container \"f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353\": container with ID starting with f4a0d8d60f653df0a43ca3c22e644e700528f8f0106e453e15c66717fe4e0353 not found: ID does not exist" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.557101 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.571413 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-config-data" (OuterVolumeSpecName: "config-data") pod "ef382cd5-f660-46a5-865c-7e39daec2a01" (UID: "ef382cd5-f660-46a5-865c-7e39daec2a01"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.599473 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.166559305 podStartE2EDuration="2.599455688s" podCreationTimestamp="2025-12-02 10:23:30 +0000 UTC" firstStartedPulling="2025-12-02 10:23:31.341604928 +0000 UTC m=+1303.713379082" lastFinishedPulling="2025-12-02 10:23:31.774501311 +0000 UTC m=+1304.146275465" observedRunningTime="2025-12-02 10:23:32.558816585 +0000 UTC m=+1304.930590749" watchObservedRunningTime="2025-12-02 10:23:32.599455688 +0000 UTC m=+1304.971229842" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.611108 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef382cd5-f660-46a5-865c-7e39daec2a01" (UID: "ef382cd5-f660-46a5-865c-7e39daec2a01"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.623158 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.623191 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef382cd5-f660-46a5-865c-7e39daec2a01-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.623201 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhlwv\" (UniqueName: \"kubernetes.io/projected/ef382cd5-f660-46a5-865c-7e39daec2a01-kube-api-access-zhlwv\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.814258 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.821987 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.840211 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 10:23:32 crc kubenswrapper[4685]: E1202 10:23:32.840785 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef382cd5-f660-46a5-865c-7e39daec2a01" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.840809 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef382cd5-f660-46a5-865c-7e39daec2a01" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.841010 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef382cd5-f660-46a5-865c-7e39daec2a01" containerName="nova-cell1-novncproxy-novncproxy" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.841768 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.846965 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.846978 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.847167 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.868464 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.931845 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7gbg\" (UniqueName: \"kubernetes.io/projected/009e6abc-834a-4487-b70d-0ec6e64d994d-kube-api-access-l7gbg\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.931900 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.931938 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.932190 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:32 crc kubenswrapper[4685]: I1202 10:23:32.932365 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.034747 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.034851 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.034909 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7gbg\" (UniqueName: \"kubernetes.io/projected/009e6abc-834a-4487-b70d-0ec6e64d994d-kube-api-access-l7gbg\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.034933 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.034963 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.042456 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.059456 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.060164 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.064167 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/009e6abc-834a-4487-b70d-0ec6e64d994d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.065513 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7gbg\" (UniqueName: \"kubernetes.io/projected/009e6abc-834a-4487-b70d-0ec6e64d994d-kube-api-access-l7gbg\") pod \"nova-cell1-novncproxy-0\" (UID: \"009e6abc-834a-4487-b70d-0ec6e64d994d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.161032 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.387935 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.441862 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-combined-ca-bundle\") pod \"e664f4ff-9501-444a-a04f-f65c8c99af93\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.441954 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-run-httpd\") pod \"e664f4ff-9501-444a-a04f-f65c8c99af93\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.442019 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-scripts\") pod \"e664f4ff-9501-444a-a04f-f65c8c99af93\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.442125 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-log-httpd\") pod \"e664f4ff-9501-444a-a04f-f65c8c99af93\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.442179 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-config-data\") pod \"e664f4ff-9501-444a-a04f-f65c8c99af93\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.442203 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-sg-core-conf-yaml\") pod \"e664f4ff-9501-444a-a04f-f65c8c99af93\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.442263 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p79cz\" (UniqueName: \"kubernetes.io/projected/e664f4ff-9501-444a-a04f-f65c8c99af93-kube-api-access-p79cz\") pod \"e664f4ff-9501-444a-a04f-f65c8c99af93\" (UID: \"e664f4ff-9501-444a-a04f-f65c8c99af93\") " Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.446367 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e664f4ff-9501-444a-a04f-f65c8c99af93" (UID: "e664f4ff-9501-444a-a04f-f65c8c99af93"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.446930 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e664f4ff-9501-444a-a04f-f65c8c99af93" (UID: "e664f4ff-9501-444a-a04f-f65c8c99af93"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.451711 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-scripts" (OuterVolumeSpecName: "scripts") pod "e664f4ff-9501-444a-a04f-f65c8c99af93" (UID: "e664f4ff-9501-444a-a04f-f65c8c99af93"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.453151 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e664f4ff-9501-444a-a04f-f65c8c99af93-kube-api-access-p79cz" (OuterVolumeSpecName: "kube-api-access-p79cz") pod "e664f4ff-9501-444a-a04f-f65c8c99af93" (UID: "e664f4ff-9501-444a-a04f-f65c8c99af93"). InnerVolumeSpecName "kube-api-access-p79cz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.489819 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e664f4ff-9501-444a-a04f-f65c8c99af93" (UID: "e664f4ff-9501-444a-a04f-f65c8c99af93"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.533397 4685 generic.go:334] "Generic (PLEG): container finished" podID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerID="1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60" exitCode=0 Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.533698 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e664f4ff-9501-444a-a04f-f65c8c99af93","Type":"ContainerDied","Data":"1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60"} Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.533735 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e664f4ff-9501-444a-a04f-f65c8c99af93","Type":"ContainerDied","Data":"99f0554291c6e52251a7af4bbe619263e415674b1fce88397eacd2a9531e2c70"} Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.533767 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.533774 4685 scope.go:117] "RemoveContainer" containerID="13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.541508 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e664f4ff-9501-444a-a04f-f65c8c99af93" (UID: "e664f4ff-9501-444a-a04f-f65c8c99af93"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.545703 4685 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.545775 4685 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.545790 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p79cz\" (UniqueName: \"kubernetes.io/projected/e664f4ff-9501-444a-a04f-f65c8c99af93-kube-api-access-p79cz\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.545830 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.545843 4685 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e664f4ff-9501-444a-a04f-f65c8c99af93-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.545854 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.560075 4685 scope.go:117] "RemoveContainer" containerID="04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.567048 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-config-data" (OuterVolumeSpecName: "config-data") pod "e664f4ff-9501-444a-a04f-f65c8c99af93" (UID: "e664f4ff-9501-444a-a04f-f65c8c99af93"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.592097 4685 scope.go:117] "RemoveContainer" containerID="1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.615827 4685 scope.go:117] "RemoveContainer" containerID="fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.636332 4685 scope.go:117] "RemoveContainer" containerID="13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114" Dec 02 10:23:33 crc kubenswrapper[4685]: E1202 10:23:33.637166 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114\": container with ID starting with 13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114 not found: ID does not exist" containerID="13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.637283 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114"} err="failed to get container status \"13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114\": rpc error: code = NotFound desc = could not find container \"13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114\": container with ID starting with 13664f03aea74201ceeb39191c420f3607ca08cc055ebb6fe904c1694e32d114 not found: ID does not exist" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.637369 4685 scope.go:117] "RemoveContainer" containerID="04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881" Dec 02 10:23:33 crc kubenswrapper[4685]: E1202 10:23:33.638832 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881\": container with ID starting with 04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881 not found: ID does not exist" containerID="04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.638855 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881"} err="failed to get container status \"04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881\": rpc error: code = NotFound desc = could not find container \"04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881\": container with ID starting with 04d5517bd587c5f374f838e93cbc9bbe513136ebadf3e77b04f291f8ae433881 not found: ID does not exist" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.638868 4685 scope.go:117] "RemoveContainer" containerID="1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60" Dec 02 10:23:33 crc kubenswrapper[4685]: E1202 10:23:33.639686 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60\": container with ID starting with 1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60 not found: ID does not exist" containerID="1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60" Dec 02 10:23:33 crc 
kubenswrapper[4685]: I1202 10:23:33.639705 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60"} err="failed to get container status \"1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60\": rpc error: code = NotFound desc = could not find container \"1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60\": container with ID starting with 1e0680ed5ba7ecd1dcaed1eb71d6812b561efae9e0367b3d8dc0813245ac8e60 not found: ID does not exist" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.639718 4685 scope.go:117] "RemoveContainer" containerID="fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af" Dec 02 10:23:33 crc kubenswrapper[4685]: E1202 10:23:33.640213 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af\": container with ID starting with fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af not found: ID does not exist" containerID="fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.640283 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af"} err="failed to get container status \"fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af\": rpc error: code = NotFound desc = could not find container \"fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af\": container with ID starting with fc5a6c36a39eeba8aa54f0af4be0c072fc3bc81a523665ccc63110603d1145af not found: ID does not exist" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.648336 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e664f4ff-9501-444a-a04f-f65c8c99af93-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.670150 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 02 10:23:33 crc kubenswrapper[4685]: W1202 10:23:33.671906 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod009e6abc_834a_4487_b70d_0ec6e64d994d.slice/crio-94b5186ecd090facfbfbcc9538875ca75de651770b1ec51344b676a5218aae52 WatchSource:0}: Error finding container 94b5186ecd090facfbfbcc9538875ca75de651770b1ec51344b676a5218aae52: Status 404 returned error can't find the container with id 94b5186ecd090facfbfbcc9538875ca75de651770b1ec51344b676a5218aae52 Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.916548 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef382cd5-f660-46a5-865c-7e39daec2a01" path="/var/lib/kubelet/pods/ef382cd5-f660-46a5-865c-7e39daec2a01/volumes" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.917246 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.917703 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.926121 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:33 crc kubenswrapper[4685]: E1202 10:23:33.926496 4685 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="ceilometer-notification-agent" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.926513 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="ceilometer-notification-agent" Dec 02 10:23:33 crc kubenswrapper[4685]: E1202 10:23:33.926529 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="proxy-httpd" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.926537 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="proxy-httpd" Dec 02 10:23:33 crc kubenswrapper[4685]: E1202 10:23:33.926577 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="sg-core" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.926584 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="sg-core" Dec 02 10:23:33 crc kubenswrapper[4685]: E1202 10:23:33.926595 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="ceilometer-central-agent" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.926600 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="ceilometer-central-agent" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.926769 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="ceilometer-central-agent" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.926783 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="proxy-httpd" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.926794 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="sg-core" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.926803 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" containerName="ceilometer-notification-agent" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.928897 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.936578 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.936839 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.940031 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 10:23:33 crc kubenswrapper[4685]: I1202 10:23:33.946995 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.056572 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.056647 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nx6q\" (UniqueName: \"kubernetes.io/projected/a46a7807-d3ae-4622-a368-5ef271947c67-kube-api-access-5nx6q\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.056699 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.056741 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-scripts\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.056763 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-config-data\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.056799 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.056914 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-log-httpd\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.056975 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-run-httpd\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.158554 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.158641 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-log-httpd\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.158665 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-run-httpd\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.158708 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.158756 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nx6q\" (UniqueName: \"kubernetes.io/projected/a46a7807-d3ae-4622-a368-5ef271947c67-kube-api-access-5nx6q\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.158781 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.158822 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-scripts\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.158840 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-config-data\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.159940 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-log-httpd\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.159944 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-run-httpd\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.164456 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.165169 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-scripts\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.166266 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.167257 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-config-data\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.167808 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.179588 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nx6q\" (UniqueName: \"kubernetes.io/projected/a46a7807-d3ae-4622-a368-5ef271947c67-kube-api-access-5nx6q\") pod \"ceilometer-0\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.250866 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.543867 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"009e6abc-834a-4487-b70d-0ec6e64d994d","Type":"ContainerStarted","Data":"4ba1188ba34e4b297a9e3f17312c4301bc1c7204ed08af7c2cf16aed783a0913"} Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.544202 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"009e6abc-834a-4487-b70d-0ec6e64d994d","Type":"ContainerStarted","Data":"94b5186ecd090facfbfbcc9538875ca75de651770b1ec51344b676a5218aae52"} Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.564773 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.564748957 podStartE2EDuration="2.564748957s" podCreationTimestamp="2025-12-02 10:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:23:34.561996463 +0000 UTC m=+1306.933770617" watchObservedRunningTime="2025-12-02 10:23:34.564748957 +0000 UTC m=+1306.936523111" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.757795 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.758582 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.762032 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.776184 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 10:23:34 crc kubenswrapper[4685]: I1202 10:23:34.782496 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:35 crc kubenswrapper[4685]: I1202 10:23:35.560595 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a46a7807-d3ae-4622-a368-5ef271947c67","Type":"ContainerStarted","Data":"57b3bc8ecd974cfc8b2e18811336c1c9d04e378c3918084665ef34eb86bb7d79"} Dec 02 10:23:35 crc kubenswrapper[4685]: I1202 10:23:35.561902 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 10:23:35 crc kubenswrapper[4685]: I1202 10:23:35.733536 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.055378 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e664f4ff-9501-444a-a04f-f65c8c99af93" path="/var/lib/kubelet/pods/e664f4ff-9501-444a-a04f-f65c8c99af93/volumes" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.080607 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6wkt5"] Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.102885 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6wkt5"] Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.102975 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.203713 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.203798 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.203838 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.203876 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.203911 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-config\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.203952 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n7nk\" (UniqueName: \"kubernetes.io/projected/8a3422f7-351c-480e-8d7e-abd5ba07962f-kube-api-access-5n7nk\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.305861 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.305987 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.306063 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-config\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" 
(UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.306099 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n7nk\" (UniqueName: \"kubernetes.io/projected/8a3422f7-351c-480e-8d7e-abd5ba07962f-kube-api-access-5n7nk\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.306187 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.306266 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.307040 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.307138 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.307659 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.307782 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-config\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.309189 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.323646 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n7nk\" (UniqueName: \"kubernetes.io/projected/8a3422f7-351c-480e-8d7e-abd5ba07962f-kube-api-access-5n7nk\") pod \"dnsmasq-dns-89c5cd4d5-6wkt5\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc 
kubenswrapper[4685]: I1202 10:23:36.446468 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:36 crc kubenswrapper[4685]: I1202 10:23:36.574283 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a46a7807-d3ae-4622-a368-5ef271947c67","Type":"ContainerStarted","Data":"6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb"} Dec 02 10:23:37 crc kubenswrapper[4685]: I1202 10:23:37.060686 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6wkt5"] Dec 02 10:23:37 crc kubenswrapper[4685]: W1202 10:23:37.074015 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a3422f7_351c_480e_8d7e_abd5ba07962f.slice/crio-41b8ce480bb52f87c8ecbec5f4377cb52948ddde5509795d96db2a779b47748d WatchSource:0}: Error finding container 41b8ce480bb52f87c8ecbec5f4377cb52948ddde5509795d96db2a779b47748d: Status 404 returned error can't find the container with id 41b8ce480bb52f87c8ecbec5f4377cb52948ddde5509795d96db2a779b47748d Dec 02 10:23:37 crc kubenswrapper[4685]: I1202 10:23:37.597837 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a46a7807-d3ae-4622-a368-5ef271947c67","Type":"ContainerStarted","Data":"ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c"} Dec 02 10:23:37 crc kubenswrapper[4685]: I1202 10:23:37.600604 4685 generic.go:334] "Generic (PLEG): container finished" podID="8a3422f7-351c-480e-8d7e-abd5ba07962f" containerID="05202a5c7d5bddff79fde775886a1ffca7cd47e9ba4cbaf1382b575516d173a3" exitCode=0 Dec 02 10:23:37 crc kubenswrapper[4685]: I1202 10:23:37.600716 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" event={"ID":"8a3422f7-351c-480e-8d7e-abd5ba07962f","Type":"ContainerDied","Data":"05202a5c7d5bddff79fde775886a1ffca7cd47e9ba4cbaf1382b575516d173a3"} Dec 02 10:23:37 crc kubenswrapper[4685]: I1202 10:23:37.600786 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" event={"ID":"8a3422f7-351c-480e-8d7e-abd5ba07962f","Type":"ContainerStarted","Data":"41b8ce480bb52f87c8ecbec5f4377cb52948ddde5509795d96db2a779b47748d"} Dec 02 10:23:38 crc kubenswrapper[4685]: I1202 10:23:38.161677 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:38 crc kubenswrapper[4685]: I1202 10:23:38.611599 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" event={"ID":"8a3422f7-351c-480e-8d7e-abd5ba07962f","Type":"ContainerStarted","Data":"5d226dd264a1d2858facd3ce1ee5f5dfe665c66e1610a345b76e238e053dd5f7"} Dec 02 10:23:38 crc kubenswrapper[4685]: I1202 10:23:38.612911 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:38 crc kubenswrapper[4685]: I1202 10:23:38.615696 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a46a7807-d3ae-4622-a368-5ef271947c67","Type":"ContainerStarted","Data":"4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967"} Dec 02 10:23:38 crc kubenswrapper[4685]: I1202 10:23:38.676474 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" podStartSLOduration=3.676455193 podStartE2EDuration="3.676455193s" 
podCreationTimestamp="2025-12-02 10:23:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:23:38.666025404 +0000 UTC m=+1311.037799558" watchObservedRunningTime="2025-12-02 10:23:38.676455193 +0000 UTC m=+1311.048229337" Dec 02 10:23:38 crc kubenswrapper[4685]: I1202 10:23:38.785581 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:38 crc kubenswrapper[4685]: I1202 10:23:38.785906 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerName="nova-api-api" containerID="cri-o://f41557685d432cff4014f1f83ec88d77ea8800ec31c63b65ffa90d6e529c757b" gracePeriod=30 Dec 02 10:23:38 crc kubenswrapper[4685]: I1202 10:23:38.785812 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerName="nova-api-log" containerID="cri-o://2ada764676d1be843b43c08edae03ce096a6b0a6376f02f4b44302e200114587" gracePeriod=30 Dec 02 10:23:39 crc kubenswrapper[4685]: I1202 10:23:39.634420 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a46a7807-d3ae-4622-a368-5ef271947c67","Type":"ContainerStarted","Data":"4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116"} Dec 02 10:23:39 crc kubenswrapper[4685]: I1202 10:23:39.635549 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 02 10:23:39 crc kubenswrapper[4685]: I1202 10:23:39.637396 4685 generic.go:334] "Generic (PLEG): container finished" podID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerID="2ada764676d1be843b43c08edae03ce096a6b0a6376f02f4b44302e200114587" exitCode=143 Dec 02 10:23:39 crc kubenswrapper[4685]: I1202 10:23:39.638233 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6073eed1-f054-4ea2-bd72-407c3d293cb0","Type":"ContainerDied","Data":"2ada764676d1be843b43c08edae03ce096a6b0a6376f02f4b44302e200114587"} Dec 02 10:23:39 crc kubenswrapper[4685]: I1202 10:23:39.668844 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.2780477120000002 podStartE2EDuration="6.668821665s" podCreationTimestamp="2025-12-02 10:23:33 +0000 UTC" firstStartedPulling="2025-12-02 10:23:34.788110134 +0000 UTC m=+1307.159884288" lastFinishedPulling="2025-12-02 10:23:39.178884087 +0000 UTC m=+1311.550658241" observedRunningTime="2025-12-02 10:23:39.659678598 +0000 UTC m=+1312.031452752" watchObservedRunningTime="2025-12-02 10:23:39.668821665 +0000 UTC m=+1312.040595839" Dec 02 10:23:40 crc kubenswrapper[4685]: I1202 10:23:40.844124 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 02 10:23:41 crc kubenswrapper[4685]: I1202 10:23:41.058817 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:42 crc kubenswrapper[4685]: I1202 10:23:42.148069 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:23:42 crc kubenswrapper[4685]: I1202 10:23:42.148400 4685 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:23:42 crc kubenswrapper[4685]: I1202 10:23:42.685980 4685 generic.go:334] "Generic (PLEG): container finished" podID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerID="f41557685d432cff4014f1f83ec88d77ea8800ec31c63b65ffa90d6e529c757b" exitCode=0 Dec 02 10:23:42 crc kubenswrapper[4685]: I1202 10:23:42.686072 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6073eed1-f054-4ea2-bd72-407c3d293cb0","Type":"ContainerDied","Data":"f41557685d432cff4014f1f83ec88d77ea8800ec31c63b65ffa90d6e529c757b"} Dec 02 10:23:42 crc kubenswrapper[4685]: I1202 10:23:42.686276 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="ceilometer-central-agent" containerID="cri-o://6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb" gracePeriod=30 Dec 02 10:23:42 crc kubenswrapper[4685]: I1202 10:23:42.686299 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="sg-core" containerID="cri-o://4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967" gracePeriod=30 Dec 02 10:23:42 crc kubenswrapper[4685]: I1202 10:23:42.686301 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="proxy-httpd" containerID="cri-o://4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116" gracePeriod=30 Dec 02 10:23:42 crc kubenswrapper[4685]: I1202 10:23:42.686386 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="ceilometer-notification-agent" containerID="cri-o://ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c" gracePeriod=30 Dec 02 10:23:42 crc kubenswrapper[4685]: I1202 10:23:42.949718 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.053158 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-config-data\") pod \"6073eed1-f054-4ea2-bd72-407c3d293cb0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.053222 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5xvw\" (UniqueName: \"kubernetes.io/projected/6073eed1-f054-4ea2-bd72-407c3d293cb0-kube-api-access-l5xvw\") pod \"6073eed1-f054-4ea2-bd72-407c3d293cb0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.053338 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-combined-ca-bundle\") pod \"6073eed1-f054-4ea2-bd72-407c3d293cb0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.053369 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6073eed1-f054-4ea2-bd72-407c3d293cb0-logs\") pod \"6073eed1-f054-4ea2-bd72-407c3d293cb0\" (UID: \"6073eed1-f054-4ea2-bd72-407c3d293cb0\") " Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.054102 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6073eed1-f054-4ea2-bd72-407c3d293cb0-logs" (OuterVolumeSpecName: "logs") pod "6073eed1-f054-4ea2-bd72-407c3d293cb0" (UID: "6073eed1-f054-4ea2-bd72-407c3d293cb0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.068797 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6073eed1-f054-4ea2-bd72-407c3d293cb0-kube-api-access-l5xvw" (OuterVolumeSpecName: "kube-api-access-l5xvw") pod "6073eed1-f054-4ea2-bd72-407c3d293cb0" (UID: "6073eed1-f054-4ea2-bd72-407c3d293cb0"). InnerVolumeSpecName "kube-api-access-l5xvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.114665 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-config-data" (OuterVolumeSpecName: "config-data") pod "6073eed1-f054-4ea2-bd72-407c3d293cb0" (UID: "6073eed1-f054-4ea2-bd72-407c3d293cb0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.133547 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6073eed1-f054-4ea2-bd72-407c3d293cb0" (UID: "6073eed1-f054-4ea2-bd72-407c3d293cb0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.155006 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.155044 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6073eed1-f054-4ea2-bd72-407c3d293cb0-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.155055 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6073eed1-f054-4ea2-bd72-407c3d293cb0-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.155063 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5xvw\" (UniqueName: \"kubernetes.io/projected/6073eed1-f054-4ea2-bd72-407c3d293cb0-kube-api-access-l5xvw\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.162254 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:43 crc kubenswrapper[4685]: E1202 10:23:43.196209 4685 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda46a7807_d3ae_4622_a368_5ef271947c67.slice/crio-conmon-4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116.scope\": RecentStats: unable to find data in memory cache]" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.218252 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.694227 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6073eed1-f054-4ea2-bd72-407c3d293cb0","Type":"ContainerDied","Data":"c1b3a8480bfe09ba6ba0d6eb09ae818f4ac3e579b81074e759cbfced8a625ac8"} Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.694285 4685 scope.go:117] "RemoveContainer" containerID="f41557685d432cff4014f1f83ec88d77ea8800ec31c63b65ffa90d6e529c757b" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.694406 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.702036 4685 generic.go:334] "Generic (PLEG): container finished" podID="a46a7807-d3ae-4622-a368-5ef271947c67" containerID="4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116" exitCode=0 Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.702068 4685 generic.go:334] "Generic (PLEG): container finished" podID="a46a7807-d3ae-4622-a368-5ef271947c67" containerID="4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967" exitCode=2 Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.702077 4685 generic.go:334] "Generic (PLEG): container finished" podID="a46a7807-d3ae-4622-a368-5ef271947c67" containerID="ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c" exitCode=0 Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.703447 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a46a7807-d3ae-4622-a368-5ef271947c67","Type":"ContainerDied","Data":"4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116"} Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.703486 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a46a7807-d3ae-4622-a368-5ef271947c67","Type":"ContainerDied","Data":"4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967"} Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.703499 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a46a7807-d3ae-4622-a368-5ef271947c67","Type":"ContainerDied","Data":"ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c"} Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.725371 4685 scope.go:117] "RemoveContainer" containerID="2ada764676d1be843b43c08edae03ce096a6b0a6376f02f4b44302e200114587" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.730459 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.734585 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.745705 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.783988 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:43 crc kubenswrapper[4685]: E1202 10:23:43.784366 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerName="nova-api-log" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.784386 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerName="nova-api-log" Dec 02 10:23:43 crc kubenswrapper[4685]: E1202 10:23:43.784436 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerName="nova-api-api" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.784443 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerName="nova-api-api" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.784641 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerName="nova-api-api" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.785116 4685 
memory_manager.go:354] "RemoveStaleState removing state" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" containerName="nova-api-log" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.786197 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.791969 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.792026 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.792331 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.809882 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.872027 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlfsg\" (UniqueName: \"kubernetes.io/projected/d851c180-a3d5-4ddd-907f-d9b414398cbb-kube-api-access-vlfsg\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.872068 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.872115 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d851c180-a3d5-4ddd-907f-d9b414398cbb-logs\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.872171 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.872216 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-public-tls-certs\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.872255 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-config-data\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.913332 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6073eed1-f054-4ea2-bd72-407c3d293cb0" path="/var/lib/kubelet/pods/6073eed1-f054-4ea2-bd72-407c3d293cb0/volumes" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.973434 4685 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-public-tls-certs\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.973524 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-config-data\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.973591 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlfsg\" (UniqueName: \"kubernetes.io/projected/d851c180-a3d5-4ddd-907f-d9b414398cbb-kube-api-access-vlfsg\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.973615 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.973669 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d851c180-a3d5-4ddd-907f-d9b414398cbb-logs\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.973741 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.975489 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d851c180-a3d5-4ddd-907f-d9b414398cbb-logs\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.980748 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-config-data\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.981235 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.987307 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-public-tls-certs\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.991269 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.992213 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-6z76h"] Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.993774 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.995428 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 02 10:23:43 crc kubenswrapper[4685]: I1202 10:23:43.996192 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.010109 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-6z76h"] Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.021062 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlfsg\" (UniqueName: \"kubernetes.io/projected/d851c180-a3d5-4ddd-907f-d9b414398cbb-kube-api-access-vlfsg\") pod \"nova-api-0\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " pod="openstack/nova-api-0" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.075490 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.075900 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lsdt\" (UniqueName: \"kubernetes.io/projected/2517c1e7-aba5-4015-a491-34eee9e1bebb-kube-api-access-9lsdt\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.076046 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-scripts\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.076204 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-config-data\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.107967 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.177533 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.177605 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lsdt\" (UniqueName: \"kubernetes.io/projected/2517c1e7-aba5-4015-a491-34eee9e1bebb-kube-api-access-9lsdt\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.177630 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-scripts\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.177661 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-config-data\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.184266 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-scripts\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.184280 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-config-data\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.184350 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.198978 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lsdt\" (UniqueName: \"kubernetes.io/projected/2517c1e7-aba5-4015-a491-34eee9e1bebb-kube-api-access-9lsdt\") pod \"nova-cell1-cell-mapping-6z76h\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.400208 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.645226 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.718826 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d851c180-a3d5-4ddd-907f-d9b414398cbb","Type":"ContainerStarted","Data":"0a312ce988caf9d06b90e33c17c92ca69dedc004d6ee83d948410965a64c2ae0"} Dec 02 10:23:44 crc kubenswrapper[4685]: I1202 10:23:44.897486 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-6z76h"] Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.476263 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.511425 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-run-httpd\") pod \"a46a7807-d3ae-4622-a368-5ef271947c67\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.511506 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-log-httpd\") pod \"a46a7807-d3ae-4622-a368-5ef271947c67\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.511551 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-combined-ca-bundle\") pod \"a46a7807-d3ae-4622-a368-5ef271947c67\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.511602 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-ceilometer-tls-certs\") pod \"a46a7807-d3ae-4622-a368-5ef271947c67\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.511655 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nx6q\" (UniqueName: \"kubernetes.io/projected/a46a7807-d3ae-4622-a368-5ef271947c67-kube-api-access-5nx6q\") pod \"a46a7807-d3ae-4622-a368-5ef271947c67\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.511699 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-scripts\") pod \"a46a7807-d3ae-4622-a368-5ef271947c67\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.511722 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-sg-core-conf-yaml\") pod \"a46a7807-d3ae-4622-a368-5ef271947c67\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.511773 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-config-data\") pod \"a46a7807-d3ae-4622-a368-5ef271947c67\" (UID: \"a46a7807-d3ae-4622-a368-5ef271947c67\") " Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.513041 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a46a7807-d3ae-4622-a368-5ef271947c67" (UID: "a46a7807-d3ae-4622-a368-5ef271947c67"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.513135 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a46a7807-d3ae-4622-a368-5ef271947c67" (UID: "a46a7807-d3ae-4622-a368-5ef271947c67"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.531967 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a46a7807-d3ae-4622-a368-5ef271947c67-kube-api-access-5nx6q" (OuterVolumeSpecName: "kube-api-access-5nx6q") pod "a46a7807-d3ae-4622-a368-5ef271947c67" (UID: "a46a7807-d3ae-4622-a368-5ef271947c67"). InnerVolumeSpecName "kube-api-access-5nx6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.552757 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-scripts" (OuterVolumeSpecName: "scripts") pod "a46a7807-d3ae-4622-a368-5ef271947c67" (UID: "a46a7807-d3ae-4622-a368-5ef271947c67"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.652068 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nx6q\" (UniqueName: \"kubernetes.io/projected/a46a7807-d3ae-4622-a368-5ef271947c67-kube-api-access-5nx6q\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.652123 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.652137 4685 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.652148 4685 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a46a7807-d3ae-4622-a368-5ef271947c67-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.743318 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a46a7807-d3ae-4622-a368-5ef271947c67" (UID: "a46a7807-d3ae-4622-a368-5ef271947c67"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.746199 4685 generic.go:334] "Generic (PLEG): container finished" podID="a46a7807-d3ae-4622-a368-5ef271947c67" containerID="6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb" exitCode=0 Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.746369 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a46a7807-d3ae-4622-a368-5ef271947c67","Type":"ContainerDied","Data":"6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb"} Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.746466 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a46a7807-d3ae-4622-a368-5ef271947c67","Type":"ContainerDied","Data":"57b3bc8ecd974cfc8b2e18811336c1c9d04e378c3918084665ef34eb86bb7d79"} Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.746582 4685 scope.go:117] "RemoveContainer" containerID="4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.746813 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.754685 4685 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.756218 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6z76h" event={"ID":"2517c1e7-aba5-4015-a491-34eee9e1bebb","Type":"ContainerStarted","Data":"9b97f339662b266e6cf426522c09d6ba665ea65e5a724f555ef211022a9d0fa2"} Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.756322 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6z76h" event={"ID":"2517c1e7-aba5-4015-a491-34eee9e1bebb","Type":"ContainerStarted","Data":"fa6824035fed7ea51cc03dfe5f0f68e55d786ff81e368379eceb6454929d127d"} Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.799657 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d851c180-a3d5-4ddd-907f-d9b414398cbb","Type":"ContainerStarted","Data":"1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef"} Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.799713 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d851c180-a3d5-4ddd-907f-d9b414398cbb","Type":"ContainerStarted","Data":"beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1"} Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.847749 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-6z76h" podStartSLOduration=2.847732561 podStartE2EDuration="2.847732561s" podCreationTimestamp="2025-12-02 10:23:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:23:45.815275499 +0000 UTC m=+1318.187049653" watchObservedRunningTime="2025-12-02 10:23:45.847732561 +0000 UTC m=+1318.219506715" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.857631 4685 scope.go:117] "RemoveContainer" containerID="4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 
10:23:45.882692 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a46a7807-d3ae-4622-a368-5ef271947c67" (UID: "a46a7807-d3ae-4622-a368-5ef271947c67"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.884811 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.884791848 podStartE2EDuration="2.884791848s" podCreationTimestamp="2025-12-02 10:23:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:23:45.84990858 +0000 UTC m=+1318.221682744" watchObservedRunningTime="2025-12-02 10:23:45.884791848 +0000 UTC m=+1318.256566002" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.904726 4685 scope.go:117] "RemoveContainer" containerID="ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.922955 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a46a7807-d3ae-4622-a368-5ef271947c67" (UID: "a46a7807-d3ae-4622-a368-5ef271947c67"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.935616 4685 scope.go:117] "RemoveContainer" containerID="6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.955914 4685 scope.go:117] "RemoveContainer" containerID="4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116" Dec 02 10:23:45 crc kubenswrapper[4685]: E1202 10:23:45.956330 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116\": container with ID starting with 4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116 not found: ID does not exist" containerID="4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.956376 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116"} err="failed to get container status \"4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116\": rpc error: code = NotFound desc = could not find container \"4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116\": container with ID starting with 4fd0ff12263156af20ea5d4cb3edb31181717e12012c2663d4bfbb3556c93116 not found: ID does not exist" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.956407 4685 scope.go:117] "RemoveContainer" containerID="4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967" Dec 02 10:23:45 crc kubenswrapper[4685]: E1202 10:23:45.956696 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967\": container with ID starting with 4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967 not found: ID does 
not exist" containerID="4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.956729 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967"} err="failed to get container status \"4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967\": rpc error: code = NotFound desc = could not find container \"4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967\": container with ID starting with 4ac914b19dfef5a110aa5dadb6e39fbb27ab0ff6e2097cda34a0b76230efe967 not found: ID does not exist" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.956750 4685 scope.go:117] "RemoveContainer" containerID="ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c" Dec 02 10:23:45 crc kubenswrapper[4685]: E1202 10:23:45.956962 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c\": container with ID starting with ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c not found: ID does not exist" containerID="ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.956991 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c"} err="failed to get container status \"ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c\": rpc error: code = NotFound desc = could not find container \"ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c\": container with ID starting with ec432069dcfd8c7f5102f298c0334f81c196b35dba9e5cc224e98554325b857c not found: ID does not exist" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.957009 4685 scope.go:117] "RemoveContainer" containerID="6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb" Dec 02 10:23:45 crc kubenswrapper[4685]: E1202 10:23:45.957226 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb\": container with ID starting with 6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb not found: ID does not exist" containerID="6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.957248 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb"} err="failed to get container status \"6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb\": rpc error: code = NotFound desc = could not find container \"6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb\": container with ID starting with 6a7f09e784fb339efcd1b19b94aeb6ac0001a4b813fdc21c814866f09b0f9deb not found: ID does not exist" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.961205 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-config-data" (OuterVolumeSpecName: "config-data") pod "a46a7807-d3ae-4622-a368-5ef271947c67" (UID: "a46a7807-d3ae-4622-a368-5ef271947c67"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.963457 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.963480 4685 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:45 crc kubenswrapper[4685]: I1202 10:23:45.963490 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a46a7807-d3ae-4622-a368-5ef271947c67-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.083930 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.092035 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.111946 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:46 crc kubenswrapper[4685]: E1202 10:23:46.112641 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="sg-core" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.112772 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="sg-core" Dec 02 10:23:46 crc kubenswrapper[4685]: E1202 10:23:46.112894 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="proxy-httpd" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.112975 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="proxy-httpd" Dec 02 10:23:46 crc kubenswrapper[4685]: E1202 10:23:46.113063 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="ceilometer-notification-agent" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.113137 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="ceilometer-notification-agent" Dec 02 10:23:46 crc kubenswrapper[4685]: E1202 10:23:46.113231 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="ceilometer-central-agent" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.113312 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="ceilometer-central-agent" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.113691 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="ceilometer-notification-agent" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.113794 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="proxy-httpd" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.113893 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="sg-core" Dec 02 10:23:46 crc 
kubenswrapper[4685]: I1202 10:23:46.114173 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" containerName="ceilometer-central-agent" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.116115 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.117884 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.118930 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.121235 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.137220 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.268821 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-scripts\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.268895 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.268920 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.269021 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2b4cba34-5e49-48a8-a496-fe5998d56f09-run-httpd\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.269107 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-config-data\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.269130 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2b4cba34-5e49-48a8-a496-fe5998d56f09-log-httpd\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.269191 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.269322 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd2sr\" (UniqueName: \"kubernetes.io/projected/2b4cba34-5e49-48a8-a496-fe5998d56f09-kube-api-access-dd2sr\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.370752 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.370857 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd2sr\" (UniqueName: \"kubernetes.io/projected/2b4cba34-5e49-48a8-a496-fe5998d56f09-kube-api-access-dd2sr\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.370969 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-scripts\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.371006 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.371026 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.371066 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2b4cba34-5e49-48a8-a496-fe5998d56f09-run-httpd\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.371103 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-config-data\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.371124 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2b4cba34-5e49-48a8-a496-fe5998d56f09-log-httpd\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.371672 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2b4cba34-5e49-48a8-a496-fe5998d56f09-run-httpd\") pod 
\"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.371719 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2b4cba34-5e49-48a8-a496-fe5998d56f09-log-httpd\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.374907 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.378272 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.378501 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-config-data\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.378535 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-scripts\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.378796 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b4cba34-5e49-48a8-a496-fe5998d56f09-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.391626 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd2sr\" (UniqueName: \"kubernetes.io/projected/2b4cba34-5e49-48a8-a496-fe5998d56f09-kube-api-access-dd2sr\") pod \"ceilometer-0\" (UID: \"2b4cba34-5e49-48a8-a496-fe5998d56f09\") " pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.431674 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.448627 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.589871 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-24s8m"] Dec 02 10:23:46 crc kubenswrapper[4685]: I1202 10:23:46.590647 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" podUID="a083ca66-9f25-4f06-85a1-f37986775b73" containerName="dnsmasq-dns" containerID="cri-o://105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde" gracePeriod=10 Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.111483 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.619186 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.806448 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-swift-storage-0\") pod \"a083ca66-9f25-4f06-85a1-f37986775b73\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.806496 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5s5w8\" (UniqueName: \"kubernetes.io/projected/a083ca66-9f25-4f06-85a1-f37986775b73-kube-api-access-5s5w8\") pod \"a083ca66-9f25-4f06-85a1-f37986775b73\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.806522 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-sb\") pod \"a083ca66-9f25-4f06-85a1-f37986775b73\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.806696 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-nb\") pod \"a083ca66-9f25-4f06-85a1-f37986775b73\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.806740 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-svc\") pod \"a083ca66-9f25-4f06-85a1-f37986775b73\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.806813 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-config\") pod \"a083ca66-9f25-4f06-85a1-f37986775b73\" (UID: \"a083ca66-9f25-4f06-85a1-f37986775b73\") " Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.860616 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2b4cba34-5e49-48a8-a496-fe5998d56f09","Type":"ContainerStarted","Data":"8988c385119e66dcb37138112984cef1872b08a15b45f04cc38720b287674049"} Dec 02 10:23:47 crc kubenswrapper[4685]: 
I1202 10:23:47.860955 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a083ca66-9f25-4f06-85a1-f37986775b73-kube-api-access-5s5w8" (OuterVolumeSpecName: "kube-api-access-5s5w8") pod "a083ca66-9f25-4f06-85a1-f37986775b73" (UID: "a083ca66-9f25-4f06-85a1-f37986775b73"). InnerVolumeSpecName "kube-api-access-5s5w8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.885233 4685 generic.go:334] "Generic (PLEG): container finished" podID="a083ca66-9f25-4f06-85a1-f37986775b73" containerID="105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde" exitCode=0 Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.885281 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" event={"ID":"a083ca66-9f25-4f06-85a1-f37986775b73","Type":"ContainerDied","Data":"105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde"} Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.885308 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" event={"ID":"a083ca66-9f25-4f06-85a1-f37986775b73","Type":"ContainerDied","Data":"60887ea3b0f17d6b23b601f8b3aa5a692be6f50d588d7ad6a2fbea78352af349"} Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.885340 4685 scope.go:117] "RemoveContainer" containerID="105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde" Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.885617 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-24s8m" Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.908711 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5s5w8\" (UniqueName: \"kubernetes.io/projected/a083ca66-9f25-4f06-85a1-f37986775b73-kube-api-access-5s5w8\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.935963 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a46a7807-d3ae-4622-a368-5ef271947c67" path="/var/lib/kubelet/pods/a46a7807-d3ae-4622-a368-5ef271947c67/volumes" Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.968890 4685 scope.go:117] "RemoveContainer" containerID="0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980" Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.987210 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-config" (OuterVolumeSpecName: "config") pod "a083ca66-9f25-4f06-85a1-f37986775b73" (UID: "a083ca66-9f25-4f06-85a1-f37986775b73"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:23:47 crc kubenswrapper[4685]: I1202 10:23:47.993444 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a083ca66-9f25-4f06-85a1-f37986775b73" (UID: "a083ca66-9f25-4f06-85a1-f37986775b73"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:47.999689 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a083ca66-9f25-4f06-85a1-f37986775b73" (UID: "a083ca66-9f25-4f06-85a1-f37986775b73"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.000264 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a083ca66-9f25-4f06-85a1-f37986775b73" (UID: "a083ca66-9f25-4f06-85a1-f37986775b73"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.014650 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.014692 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.014705 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.014719 4685 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.015353 4685 scope.go:117] "RemoveContainer" containerID="105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde" Dec 02 10:23:48 crc kubenswrapper[4685]: E1202 10:23:48.021805 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde\": container with ID starting with 105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde not found: ID does not exist" containerID="105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.021845 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde"} err="failed to get container status \"105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde\": rpc error: code = NotFound desc = could not find container \"105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde\": container with ID starting with 105b15d4c98276a1c10ed2f6d6b72aa508908e579a9b215e32c020b28c726cde not found: ID does not exist" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.021871 4685 scope.go:117] "RemoveContainer" containerID="0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980" Dec 02 10:23:48 crc kubenswrapper[4685]: E1202 10:23:48.022442 4685 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980\": container with ID starting with 0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980 not found: ID does not exist" containerID="0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.022484 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980"} err="failed to get container status \"0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980\": rpc error: code = NotFound desc = could not find container \"0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980\": container with ID starting with 0e8b0b6df088797a7d6b05abbe5bdd89112ae62d354d1ce2b9f9c10e2e7d8980 not found: ID does not exist" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.030686 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a083ca66-9f25-4f06-85a1-f37986775b73" (UID: "a083ca66-9f25-4f06-85a1-f37986775b73"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.115950 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a083ca66-9f25-4f06-85a1-f37986775b73-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.228118 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-24s8m"] Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.236879 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-24s8m"] Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.903042 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2b4cba34-5e49-48a8-a496-fe5998d56f09","Type":"ContainerStarted","Data":"de6330636e97ee6e471d0baca05aa528a8d643a0003c6ba06f8f8f430c40a256"} Dec 02 10:23:48 crc kubenswrapper[4685]: I1202 10:23:48.903598 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2b4cba34-5e49-48a8-a496-fe5998d56f09","Type":"ContainerStarted","Data":"02bd182ee51016324400697aed947f1ba784244d58f712121d30a2b2e58056c3"} Dec 02 10:23:49 crc kubenswrapper[4685]: I1202 10:23:49.914855 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a083ca66-9f25-4f06-85a1-f37986775b73" path="/var/lib/kubelet/pods/a083ca66-9f25-4f06-85a1-f37986775b73/volumes" Dec 02 10:23:49 crc kubenswrapper[4685]: I1202 10:23:49.922836 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2b4cba34-5e49-48a8-a496-fe5998d56f09","Type":"ContainerStarted","Data":"733b64fd491f6111d7bffb8197085d19390712ca568b1fd1d064e7ee50a9153c"} Dec 02 10:23:50 crc kubenswrapper[4685]: I1202 10:23:50.933718 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2b4cba34-5e49-48a8-a496-fe5998d56f09","Type":"ContainerStarted","Data":"1f1d25646af6a62b526dec50688457c309c89d36238a7c124bf4c65c9c3d655b"} Dec 02 10:23:50 crc kubenswrapper[4685]: I1202 10:23:50.934139 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/ceilometer-0" Dec 02 10:23:50 crc kubenswrapper[4685]: I1202 10:23:50.966237 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.5144522980000001 podStartE2EDuration="4.966198006s" podCreationTimestamp="2025-12-02 10:23:46 +0000 UTC" firstStartedPulling="2025-12-02 10:23:47.109516728 +0000 UTC m=+1319.481290882" lastFinishedPulling="2025-12-02 10:23:50.561262436 +0000 UTC m=+1322.933036590" observedRunningTime="2025-12-02 10:23:50.953600588 +0000 UTC m=+1323.325374772" watchObservedRunningTime="2025-12-02 10:23:50.966198006 +0000 UTC m=+1323.337972170" Dec 02 10:23:51 crc kubenswrapper[4685]: I1202 10:23:51.942137 4685 generic.go:334] "Generic (PLEG): container finished" podID="2517c1e7-aba5-4015-a491-34eee9e1bebb" containerID="9b97f339662b266e6cf426522c09d6ba665ea65e5a724f555ef211022a9d0fa2" exitCode=0 Dec 02 10:23:51 crc kubenswrapper[4685]: I1202 10:23:51.943969 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6z76h" event={"ID":"2517c1e7-aba5-4015-a491-34eee9e1bebb","Type":"ContainerDied","Data":"9b97f339662b266e6cf426522c09d6ba665ea65e5a724f555ef211022a9d0fa2"} Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.313661 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.427758 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-combined-ca-bundle\") pod \"2517c1e7-aba5-4015-a491-34eee9e1bebb\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.428394 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lsdt\" (UniqueName: \"kubernetes.io/projected/2517c1e7-aba5-4015-a491-34eee9e1bebb-kube-api-access-9lsdt\") pod \"2517c1e7-aba5-4015-a491-34eee9e1bebb\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.428590 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-config-data\") pod \"2517c1e7-aba5-4015-a491-34eee9e1bebb\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.428837 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-scripts\") pod \"2517c1e7-aba5-4015-a491-34eee9e1bebb\" (UID: \"2517c1e7-aba5-4015-a491-34eee9e1bebb\") " Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.451291 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-scripts" (OuterVolumeSpecName: "scripts") pod "2517c1e7-aba5-4015-a491-34eee9e1bebb" (UID: "2517c1e7-aba5-4015-a491-34eee9e1bebb"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.451347 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2517c1e7-aba5-4015-a491-34eee9e1bebb-kube-api-access-9lsdt" (OuterVolumeSpecName: "kube-api-access-9lsdt") pod "2517c1e7-aba5-4015-a491-34eee9e1bebb" (UID: "2517c1e7-aba5-4015-a491-34eee9e1bebb"). InnerVolumeSpecName "kube-api-access-9lsdt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.466276 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2517c1e7-aba5-4015-a491-34eee9e1bebb" (UID: "2517c1e7-aba5-4015-a491-34eee9e1bebb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.466829 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-config-data" (OuterVolumeSpecName: "config-data") pod "2517c1e7-aba5-4015-a491-34eee9e1bebb" (UID: "2517c1e7-aba5-4015-a491-34eee9e1bebb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.538884 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.538953 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lsdt\" (UniqueName: \"kubernetes.io/projected/2517c1e7-aba5-4015-a491-34eee9e1bebb-kube-api-access-9lsdt\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.539033 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.539053 4685 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2517c1e7-aba5-4015-a491-34eee9e1bebb-scripts\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.968798 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-6z76h" event={"ID":"2517c1e7-aba5-4015-a491-34eee9e1bebb","Type":"ContainerDied","Data":"fa6824035fed7ea51cc03dfe5f0f68e55d786ff81e368379eceb6454929d127d"} Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.969084 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa6824035fed7ea51cc03dfe5f0f68e55d786ff81e368379eceb6454929d127d" Dec 02 10:23:53 crc kubenswrapper[4685]: I1202 10:23:53.969158 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-6z76h" Dec 02 10:23:54 crc kubenswrapper[4685]: I1202 10:23:54.111776 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 10:23:54 crc kubenswrapper[4685]: I1202 10:23:54.111853 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 10:23:54 crc kubenswrapper[4685]: I1202 10:23:54.152674 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:23:54 crc kubenswrapper[4685]: I1202 10:23:54.168882 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:54 crc kubenswrapper[4685]: I1202 10:23:54.169135 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="afc109bc-598b-4aa5-b7d6-1dac9593c3b8" containerName="nova-scheduler-scheduler" containerID="cri-o://84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89" gracePeriod=30 Dec 02 10:23:54 crc kubenswrapper[4685]: I1202 10:23:54.182145 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:54 crc kubenswrapper[4685]: I1202 10:23:54.182380 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-log" containerID="cri-o://f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f" gracePeriod=30 Dec 02 10:23:54 crc kubenswrapper[4685]: I1202 10:23:54.182811 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-metadata" containerID="cri-o://5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673" gracePeriod=30 Dec 02 10:23:54 crc kubenswrapper[4685]: I1202 10:23:54.977991 4685 generic.go:334] "Generic (PLEG): container finished" podID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerID="f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f" exitCode=143 Dec 02 10:23:54 crc kubenswrapper[4685]: I1202 10:23:54.978053 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75218bc5-a1d9-463d-8837-7b36d60738d5","Type":"ContainerDied","Data":"f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f"} Dec 02 10:23:55 crc kubenswrapper[4685]: I1202 10:23:55.127769 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 10:23:55 crc kubenswrapper[4685]: I1202 10:23:55.127769 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 10:23:55 crc kubenswrapper[4685]: I1202 10:23:55.988953 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerName="nova-api-log" containerID="cri-o://beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1" gracePeriod=30 Dec 02 
10:23:55 crc kubenswrapper[4685]: I1202 10:23:55.989092 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerName="nova-api-api" containerID="cri-o://1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef" gracePeriod=30 Dec 02 10:23:56 crc kubenswrapper[4685]: I1202 10:23:56.616844 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 10:23:56 crc kubenswrapper[4685]: I1202 10:23:56.810393 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-config-data\") pod \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " Dec 02 10:23:56 crc kubenswrapper[4685]: I1202 10:23:56.810531 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hbpj\" (UniqueName: \"kubernetes.io/projected/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-kube-api-access-2hbpj\") pod \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " Dec 02 10:23:56 crc kubenswrapper[4685]: I1202 10:23:56.810629 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-combined-ca-bundle\") pod \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\" (UID: \"afc109bc-598b-4aa5-b7d6-1dac9593c3b8\") " Dec 02 10:23:56 crc kubenswrapper[4685]: I1202 10:23:56.818184 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-kube-api-access-2hbpj" (OuterVolumeSpecName: "kube-api-access-2hbpj") pod "afc109bc-598b-4aa5-b7d6-1dac9593c3b8" (UID: "afc109bc-598b-4aa5-b7d6-1dac9593c3b8"). InnerVolumeSpecName "kube-api-access-2hbpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:56 crc kubenswrapper[4685]: I1202 10:23:56.846506 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-config-data" (OuterVolumeSpecName: "config-data") pod "afc109bc-598b-4aa5-b7d6-1dac9593c3b8" (UID: "afc109bc-598b-4aa5-b7d6-1dac9593c3b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:56 crc kubenswrapper[4685]: I1202 10:23:56.873426 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "afc109bc-598b-4aa5-b7d6-1dac9593c3b8" (UID: "afc109bc-598b-4aa5-b7d6-1dac9593c3b8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:56 crc kubenswrapper[4685]: I1202 10:23:56.912948 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:56 crc kubenswrapper[4685]: I1202 10:23:56.913054 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hbpj\" (UniqueName: \"kubernetes.io/projected/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-kube-api-access-2hbpj\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:56 crc kubenswrapper[4685]: I1202 10:23:56.913074 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc109bc-598b-4aa5-b7d6-1dac9593c3b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.016892 4685 generic.go:334] "Generic (PLEG): container finished" podID="afc109bc-598b-4aa5-b7d6-1dac9593c3b8" containerID="84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89" exitCode=0 Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.016938 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"afc109bc-598b-4aa5-b7d6-1dac9593c3b8","Type":"ContainerDied","Data":"84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89"} Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.016964 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.016996 4685 scope.go:117] "RemoveContainer" containerID="84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.016984 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"afc109bc-598b-4aa5-b7d6-1dac9593c3b8","Type":"ContainerDied","Data":"b61a9a475a90c18d6d7e1343cb55b3eb1367f7ea4aacf774999b8318f17c4f55"} Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.023402 4685 generic.go:334] "Generic (PLEG): container finished" podID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerID="beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1" exitCode=143 Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.023652 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d851c180-a3d5-4ddd-907f-d9b414398cbb","Type":"ContainerDied","Data":"beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1"} Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.050811 4685 scope.go:117] "RemoveContainer" containerID="84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89" Dec 02 10:23:57 crc kubenswrapper[4685]: E1202 10:23:57.051751 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89\": container with ID starting with 84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89 not found: ID does not exist" containerID="84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.051781 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89"} err="failed to get container status 
\"84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89\": rpc error: code = NotFound desc = could not find container \"84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89\": container with ID starting with 84a0587270a53f9dd427382175b4e1a304a7cfacf5c808e0b9fd66df7945fa89 not found: ID does not exist" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.056195 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.065296 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.143321 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:57 crc kubenswrapper[4685]: E1202 10:23:57.145320 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a083ca66-9f25-4f06-85a1-f37986775b73" containerName="init" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.145504 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a083ca66-9f25-4f06-85a1-f37986775b73" containerName="init" Dec 02 10:23:57 crc kubenswrapper[4685]: E1202 10:23:57.145698 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2517c1e7-aba5-4015-a491-34eee9e1bebb" containerName="nova-manage" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.145771 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2517c1e7-aba5-4015-a491-34eee9e1bebb" containerName="nova-manage" Dec 02 10:23:57 crc kubenswrapper[4685]: E1202 10:23:57.145858 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a083ca66-9f25-4f06-85a1-f37986775b73" containerName="dnsmasq-dns" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.146022 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a083ca66-9f25-4f06-85a1-f37986775b73" containerName="dnsmasq-dns" Dec 02 10:23:57 crc kubenswrapper[4685]: E1202 10:23:57.146193 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc109bc-598b-4aa5-b7d6-1dac9593c3b8" containerName="nova-scheduler-scheduler" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.146267 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc109bc-598b-4aa5-b7d6-1dac9593c3b8" containerName="nova-scheduler-scheduler" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.146955 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a083ca66-9f25-4f06-85a1-f37986775b73" containerName="dnsmasq-dns" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.147082 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="2517c1e7-aba5-4015-a491-34eee9e1bebb" containerName="nova-manage" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.147168 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc109bc-598b-4aa5-b7d6-1dac9593c3b8" containerName="nova-scheduler-scheduler" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.148761 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.151220 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.174343 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.321737 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b721a8-0f97-4892-8c89-56382988595e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b8b721a8-0f97-4892-8c89-56382988595e\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.321834 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8b721a8-0f97-4892-8c89-56382988595e-config-data\") pod \"nova-scheduler-0\" (UID: \"b8b721a8-0f97-4892-8c89-56382988595e\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.321970 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnd52\" (UniqueName: \"kubernetes.io/projected/b8b721a8-0f97-4892-8c89-56382988595e-kube-api-access-rnd52\") pod \"nova-scheduler-0\" (UID: \"b8b721a8-0f97-4892-8c89-56382988595e\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.428652 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b721a8-0f97-4892-8c89-56382988595e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b8b721a8-0f97-4892-8c89-56382988595e\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.428726 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8b721a8-0f97-4892-8c89-56382988595e-config-data\") pod \"nova-scheduler-0\" (UID: \"b8b721a8-0f97-4892-8c89-56382988595e\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.428840 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnd52\" (UniqueName: \"kubernetes.io/projected/b8b721a8-0f97-4892-8c89-56382988595e-kube-api-access-rnd52\") pod \"nova-scheduler-0\" (UID: \"b8b721a8-0f97-4892-8c89-56382988595e\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.433502 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b721a8-0f97-4892-8c89-56382988595e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b8b721a8-0f97-4892-8c89-56382988595e\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.434532 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8b721a8-0f97-4892-8c89-56382988595e-config-data\") pod \"nova-scheduler-0\" (UID: \"b8b721a8-0f97-4892-8c89-56382988595e\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.450792 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnd52\" (UniqueName: 
\"kubernetes.io/projected/b8b721a8-0f97-4892-8c89-56382988595e-kube-api-access-rnd52\") pod \"nova-scheduler-0\" (UID: \"b8b721a8-0f97-4892-8c89-56382988595e\") " pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.487109 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.624659 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:56816->10.217.0.191:8775: read: connection reset by peer" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.625278 4685 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:56810->10.217.0.191:8775: read: connection reset by peer" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.912582 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afc109bc-598b-4aa5-b7d6-1dac9593c3b8" path="/var/lib/kubelet/pods/afc109bc-598b-4aa5-b7d6-1dac9593c3b8/volumes" Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.967193 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 02 10:23:57 crc kubenswrapper[4685]: I1202 10:23:57.977660 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.038127 4685 generic.go:334] "Generic (PLEG): container finished" podID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerID="5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673" exitCode=0 Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.038213 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75218bc5-a1d9-463d-8837-7b36d60738d5","Type":"ContainerDied","Data":"5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673"} Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.038242 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75218bc5-a1d9-463d-8837-7b36d60738d5","Type":"ContainerDied","Data":"94eeb92cc92f3c7984f7b6828d8082592ec1f7440e2923a3ad8907ec58d8ee70"} Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.038258 4685 scope.go:117] "RemoveContainer" containerID="5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.038806 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.040646 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75218bc5-a1d9-463d-8837-7b36d60738d5-logs\") pod \"75218bc5-a1d9-463d-8837-7b36d60738d5\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.040958 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-config-data\") pod \"75218bc5-a1d9-463d-8837-7b36d60738d5\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.041045 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddslb\" (UniqueName: \"kubernetes.io/projected/75218bc5-a1d9-463d-8837-7b36d60738d5-kube-api-access-ddslb\") pod \"75218bc5-a1d9-463d-8837-7b36d60738d5\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.041094 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-nova-metadata-tls-certs\") pod \"75218bc5-a1d9-463d-8837-7b36d60738d5\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.041123 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-combined-ca-bundle\") pod \"75218bc5-a1d9-463d-8837-7b36d60738d5\" (UID: \"75218bc5-a1d9-463d-8837-7b36d60738d5\") " Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.047580 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75218bc5-a1d9-463d-8837-7b36d60738d5-logs" (OuterVolumeSpecName: "logs") pod "75218bc5-a1d9-463d-8837-7b36d60738d5" (UID: "75218bc5-a1d9-463d-8837-7b36d60738d5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.057139 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75218bc5-a1d9-463d-8837-7b36d60738d5-kube-api-access-ddslb" (OuterVolumeSpecName: "kube-api-access-ddslb") pod "75218bc5-a1d9-463d-8837-7b36d60738d5" (UID: "75218bc5-a1d9-463d-8837-7b36d60738d5"). InnerVolumeSpecName "kube-api-access-ddslb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.061664 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b8b721a8-0f97-4892-8c89-56382988595e","Type":"ContainerStarted","Data":"394fc37b0881a0da4e6a209591a90dd4587ddfc17358ff6cc8d8ba5861375696"} Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.090800 4685 scope.go:117] "RemoveContainer" containerID="f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.097217 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-config-data" (OuterVolumeSpecName: "config-data") pod "75218bc5-a1d9-463d-8837-7b36d60738d5" (UID: "75218bc5-a1d9-463d-8837-7b36d60738d5"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.128758 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75218bc5-a1d9-463d-8837-7b36d60738d5" (UID: "75218bc5-a1d9-463d-8837-7b36d60738d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.137819 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "75218bc5-a1d9-463d-8837-7b36d60738d5" (UID: "75218bc5-a1d9-463d-8837-7b36d60738d5"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.144729 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddslb\" (UniqueName: \"kubernetes.io/projected/75218bc5-a1d9-463d-8837-7b36d60738d5-kube-api-access-ddslb\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.144848 4685 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.144924 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.144990 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75218bc5-a1d9-463d-8837-7b36d60738d5-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.145050 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75218bc5-a1d9-463d-8837-7b36d60738d5-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.238521 4685 scope.go:117] "RemoveContainer" containerID="5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673" Dec 02 10:23:58 crc kubenswrapper[4685]: E1202 10:23:58.239031 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673\": container with ID starting with 5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673 not found: ID does not exist" containerID="5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.239068 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673"} err="failed to get container status \"5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673\": rpc error: code = NotFound desc = could not find container \"5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673\": container with ID starting with 5edd9101781648b8d1781bd44df4c5f9c29589fcacf3b7ecf5ddde758a75a673 not 
found: ID does not exist" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.239090 4685 scope.go:117] "RemoveContainer" containerID="f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f" Dec 02 10:23:58 crc kubenswrapper[4685]: E1202 10:23:58.239475 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f\": container with ID starting with f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f not found: ID does not exist" containerID="f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.239525 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f"} err="failed to get container status \"f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f\": rpc error: code = NotFound desc = could not find container \"f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f\": container with ID starting with f2407f2bf058999e1a9dadb5ee3b43d3c8ce2b18817cc4adf042ff597719696f not found: ID does not exist" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.376929 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.386892 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.398285 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:58 crc kubenswrapper[4685]: E1202 10:23:58.398775 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-log" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.398799 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-log" Dec 02 10:23:58 crc kubenswrapper[4685]: E1202 10:23:58.398836 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-metadata" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.398843 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-metadata" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.399301 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-metadata" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.399322 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" containerName="nova-metadata-log" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.400358 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.402183 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.403521 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.438312 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.449876 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-config-data\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.449941 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.449967 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-logs\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.450289 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.450496 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjtwp\" (UniqueName: \"kubernetes.io/projected/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-kube-api-access-xjtwp\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.551608 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjtwp\" (UniqueName: \"kubernetes.io/projected/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-kube-api-access-xjtwp\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.551687 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-config-data\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.551750 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " 
pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.551777 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-logs\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.551881 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.552524 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-logs\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.556626 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-config-data\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.557072 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.557605 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.571194 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjtwp\" (UniqueName: \"kubernetes.io/projected/2f14ae08-f9e6-41bc-bb0c-9e6450267d63-kube-api-access-xjtwp\") pod \"nova-metadata-0\" (UID: \"2f14ae08-f9e6-41bc-bb0c-9e6450267d63\") " pod="openstack/nova-metadata-0" Dec 02 10:23:58 crc kubenswrapper[4685]: I1202 10:23:58.717186 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 02 10:23:59 crc kubenswrapper[4685]: I1202 10:23:59.072597 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b8b721a8-0f97-4892-8c89-56382988595e","Type":"ContainerStarted","Data":"410bd1cf4a58bf949d8c992f2a21e2abac19912f812e9d27c5215df97f5036bd"} Dec 02 10:23:59 crc kubenswrapper[4685]: I1202 10:23:59.099363 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.099344725 podStartE2EDuration="2.099344725s" podCreationTimestamp="2025-12-02 10:23:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:23:59.090882367 +0000 UTC m=+1331.462656521" watchObservedRunningTime="2025-12-02 10:23:59.099344725 +0000 UTC m=+1331.471118869" Dec 02 10:23:59 crc kubenswrapper[4685]: I1202 10:23:59.168818 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 02 10:23:59 crc kubenswrapper[4685]: I1202 10:23:59.912723 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75218bc5-a1d9-463d-8837-7b36d60738d5" path="/var/lib/kubelet/pods/75218bc5-a1d9-463d-8837-7b36d60738d5/volumes" Dec 02 10:24:00 crc kubenswrapper[4685]: I1202 10:24:00.087458 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2f14ae08-f9e6-41bc-bb0c-9e6450267d63","Type":"ContainerStarted","Data":"c355d876272c3e943afa579bb0e7edb84224344860b2becf0a2098e6c3b1a8ed"} Dec 02 10:24:00 crc kubenswrapper[4685]: I1202 10:24:00.087539 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2f14ae08-f9e6-41bc-bb0c-9e6450267d63","Type":"ContainerStarted","Data":"fcb8c5b3785d49f6de083430cc4be15c599defcb9a7be51e6e8f7b2eff1f2674"} Dec 02 10:24:00 crc kubenswrapper[4685]: I1202 10:24:00.087551 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2f14ae08-f9e6-41bc-bb0c-9e6450267d63","Type":"ContainerStarted","Data":"4179f00a015b653d276b8c12737064e636af8199b9b2afe1275d2858e2141950"} Dec 02 10:24:00 crc kubenswrapper[4685]: I1202 10:24:00.112091 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.112071802 podStartE2EDuration="2.112071802s" podCreationTimestamp="2025-12-02 10:23:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:24:00.10863501 +0000 UTC m=+1332.480409154" watchObservedRunningTime="2025-12-02 10:24:00.112071802 +0000 UTC m=+1332.483845956" Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.804415 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.916711 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-public-tls-certs\") pod \"d851c180-a3d5-4ddd-907f-d9b414398cbb\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.917117 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-internal-tls-certs\") pod \"d851c180-a3d5-4ddd-907f-d9b414398cbb\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.917287 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-config-data\") pod \"d851c180-a3d5-4ddd-907f-d9b414398cbb\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.917717 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d851c180-a3d5-4ddd-907f-d9b414398cbb-logs\") pod \"d851c180-a3d5-4ddd-907f-d9b414398cbb\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.917843 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-combined-ca-bundle\") pod \"d851c180-a3d5-4ddd-907f-d9b414398cbb\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.917925 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vlfsg\" (UniqueName: \"kubernetes.io/projected/d851c180-a3d5-4ddd-907f-d9b414398cbb-kube-api-access-vlfsg\") pod \"d851c180-a3d5-4ddd-907f-d9b414398cbb\" (UID: \"d851c180-a3d5-4ddd-907f-d9b414398cbb\") " Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.918148 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d851c180-a3d5-4ddd-907f-d9b414398cbb-logs" (OuterVolumeSpecName: "logs") pod "d851c180-a3d5-4ddd-907f-d9b414398cbb" (UID: "d851c180-a3d5-4ddd-907f-d9b414398cbb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.918385 4685 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d851c180-a3d5-4ddd-907f-d9b414398cbb-logs\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.922650 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d851c180-a3d5-4ddd-907f-d9b414398cbb-kube-api-access-vlfsg" (OuterVolumeSpecName: "kube-api-access-vlfsg") pod "d851c180-a3d5-4ddd-907f-d9b414398cbb" (UID: "d851c180-a3d5-4ddd-907f-d9b414398cbb"). InnerVolumeSpecName "kube-api-access-vlfsg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.947880 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d851c180-a3d5-4ddd-907f-d9b414398cbb" (UID: "d851c180-a3d5-4ddd-907f-d9b414398cbb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.949983 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-config-data" (OuterVolumeSpecName: "config-data") pod "d851c180-a3d5-4ddd-907f-d9b414398cbb" (UID: "d851c180-a3d5-4ddd-907f-d9b414398cbb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.972811 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d851c180-a3d5-4ddd-907f-d9b414398cbb" (UID: "d851c180-a3d5-4ddd-907f-d9b414398cbb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:24:01 crc kubenswrapper[4685]: I1202 10:24:01.982814 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d851c180-a3d5-4ddd-907f-d9b414398cbb" (UID: "d851c180-a3d5-4ddd-907f-d9b414398cbb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.020534 4685 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.020575 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.020586 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.020595 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vlfsg\" (UniqueName: \"kubernetes.io/projected/d851c180-a3d5-4ddd-907f-d9b414398cbb-kube-api-access-vlfsg\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.020604 4685 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d851c180-a3d5-4ddd-907f-d9b414398cbb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.115181 4685 generic.go:334] "Generic (PLEG): container finished" podID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerID="1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef" exitCode=0 Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.115255 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-0" event={"ID":"d851c180-a3d5-4ddd-907f-d9b414398cbb","Type":"ContainerDied","Data":"1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef"} Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.115303 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d851c180-a3d5-4ddd-907f-d9b414398cbb","Type":"ContainerDied","Data":"0a312ce988caf9d06b90e33c17c92ca69dedc004d6ee83d948410965a64c2ae0"} Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.115306 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.115338 4685 scope.go:117] "RemoveContainer" containerID="1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.144448 4685 scope.go:117] "RemoveContainer" containerID="beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.166020 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.182065 4685 scope.go:117] "RemoveContainer" containerID="1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef" Dec 02 10:24:02 crc kubenswrapper[4685]: E1202 10:24:02.184989 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef\": container with ID starting with 1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef not found: ID does not exist" containerID="1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.185027 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef"} err="failed to get container status \"1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef\": rpc error: code = NotFound desc = could not find container \"1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef\": container with ID starting with 1658d9e7f36f952d73848c1f7ff73791741db449ce3e91c22404f9781e65fdef not found: ID does not exist" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.185047 4685 scope.go:117] "RemoveContainer" containerID="beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1" Dec 02 10:24:02 crc kubenswrapper[4685]: E1202 10:24:02.186982 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1\": container with ID starting with beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1 not found: ID does not exist" containerID="beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.187007 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1"} err="failed to get container status \"beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1\": rpc error: code = NotFound desc = could not find container \"beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1\": container with ID starting with 
beca0fcaa968c0992f2e6cc3b017caf7770f1d809e6db342631202f8ebb6abf1 not found: ID does not exist" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.187226 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.195151 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 02 10:24:02 crc kubenswrapper[4685]: E1202 10:24:02.195683 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerName="nova-api-api" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.195700 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerName="nova-api-api" Dec 02 10:24:02 crc kubenswrapper[4685]: E1202 10:24:02.195720 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerName="nova-api-log" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.195727 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerName="nova-api-log" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.195923 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerName="nova-api-api" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.195947 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" containerName="nova-api-log" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.196934 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.200118 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.200182 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.200368 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.203702 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.325590 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxvq4\" (UniqueName: \"kubernetes.io/projected/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-kube-api-access-rxvq4\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.325627 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-logs\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.325679 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-config-data\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.325721 4685 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.325968 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-public-tls-certs\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.326140 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.427596 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-public-tls-certs\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.427695 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.427749 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxvq4\" (UniqueName: \"kubernetes.io/projected/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-kube-api-access-rxvq4\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.427771 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-logs\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.427830 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-config-data\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.427863 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.428620 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-logs\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 
10:24:02.431668 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-config-data\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.432076 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.432841 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-public-tls-certs\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.433131 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.450113 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxvq4\" (UniqueName: \"kubernetes.io/projected/5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2-kube-api-access-rxvq4\") pod \"nova-api-0\" (UID: \"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2\") " pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.487927 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.528015 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 02 10:24:02 crc kubenswrapper[4685]: I1202 10:24:02.952679 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 02 10:24:02 crc kubenswrapper[4685]: W1202 10:24:02.958060 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ac39f0c_a9f2_4a7b_b34b_0b0e7371cff2.slice/crio-39c0cb43c5992e62878cc010f2920efd0650f4b2d39bfff298cf78ea6efddba2 WatchSource:0}: Error finding container 39c0cb43c5992e62878cc010f2920efd0650f4b2d39bfff298cf78ea6efddba2: Status 404 returned error can't find the container with id 39c0cb43c5992e62878cc010f2920efd0650f4b2d39bfff298cf78ea6efddba2 Dec 02 10:24:03 crc kubenswrapper[4685]: I1202 10:24:03.131842 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2","Type":"ContainerStarted","Data":"39c0cb43c5992e62878cc010f2920efd0650f4b2d39bfff298cf78ea6efddba2"} Dec 02 10:24:03 crc kubenswrapper[4685]: I1202 10:24:03.717894 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 10:24:03 crc kubenswrapper[4685]: I1202 10:24:03.717942 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 02 10:24:03 crc kubenswrapper[4685]: I1202 10:24:03.911826 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d851c180-a3d5-4ddd-907f-d9b414398cbb" path="/var/lib/kubelet/pods/d851c180-a3d5-4ddd-907f-d9b414398cbb/volumes" Dec 02 10:24:04 crc kubenswrapper[4685]: I1202 10:24:04.144129 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2","Type":"ContainerStarted","Data":"b5816c13e2dda170660089891d6733f256b6c43c6d59eef7a1cb153cb150b014"} Dec 02 10:24:04 crc kubenswrapper[4685]: I1202 10:24:04.144171 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2","Type":"ContainerStarted","Data":"29927ad885856a2a0d5df6d1ffde0fbf35e7101b44c824f4c3cd20ab947c1acb"} Dec 02 10:24:04 crc kubenswrapper[4685]: I1202 10:24:04.178232 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.178211866 podStartE2EDuration="2.178211866s" podCreationTimestamp="2025-12-02 10:24:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:24:04.17171227 +0000 UTC m=+1336.543486464" watchObservedRunningTime="2025-12-02 10:24:04.178211866 +0000 UTC m=+1336.549986030" Dec 02 10:24:07 crc kubenswrapper[4685]: I1202 10:24:07.488012 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 02 10:24:07 crc kubenswrapper[4685]: I1202 10:24:07.513413 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 02 10:24:08 crc kubenswrapper[4685]: I1202 10:24:08.220825 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 02 10:24:08 crc kubenswrapper[4685]: I1202 10:24:08.718656 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 10:24:08 crc kubenswrapper[4685]: I1202 10:24:08.719936 4685 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 02 10:24:09 crc kubenswrapper[4685]: I1202 10:24:09.732743 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2f14ae08-f9e6-41bc-bb0c-9e6450267d63" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 10:24:09 crc kubenswrapper[4685]: I1202 10:24:09.732759 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="2f14ae08-f9e6-41bc-bb0c-9e6450267d63" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 10:24:12 crc kubenswrapper[4685]: I1202 10:24:12.148006 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:24:12 crc kubenswrapper[4685]: I1202 10:24:12.148401 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:24:12 crc kubenswrapper[4685]: I1202 10:24:12.148478 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:24:12 crc kubenswrapper[4685]: I1202 10:24:12.149873 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b848acb1e4ea5bfba36b62a7bd4c00f067f1b500433e3bc3d9d5085e521463fd"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:24:12 crc kubenswrapper[4685]: I1202 10:24:12.149999 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://b848acb1e4ea5bfba36b62a7bd4c00f067f1b500433e3bc3d9d5085e521463fd" gracePeriod=600 Dec 02 10:24:12 crc kubenswrapper[4685]: I1202 10:24:12.528705 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 10:24:12 crc kubenswrapper[4685]: I1202 10:24:12.529223 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 02 10:24:13 crc kubenswrapper[4685]: I1202 10:24:13.240774 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="b848acb1e4ea5bfba36b62a7bd4c00f067f1b500433e3bc3d9d5085e521463fd" exitCode=0 Dec 02 10:24:13 crc kubenswrapper[4685]: I1202 10:24:13.240970 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"b848acb1e4ea5bfba36b62a7bd4c00f067f1b500433e3bc3d9d5085e521463fd"} Dec 02 10:24:13 crc 
kubenswrapper[4685]: I1202 10:24:13.241129 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd"} Dec 02 10:24:13 crc kubenswrapper[4685]: I1202 10:24:13.241158 4685 scope.go:117] "RemoveContainer" containerID="a4f65c3934f925bd4944d91a313fa6182d1454a9f7eaa524038fcf17bffe22c2" Dec 02 10:24:13 crc kubenswrapper[4685]: I1202 10:24:13.544815 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 10:24:13 crc kubenswrapper[4685]: I1202 10:24:13.545167 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 02 10:24:16 crc kubenswrapper[4685]: I1202 10:24:16.445358 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 02 10:24:18 crc kubenswrapper[4685]: I1202 10:24:18.724670 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 10:24:18 crc kubenswrapper[4685]: I1202 10:24:18.725096 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 02 10:24:18 crc kubenswrapper[4685]: I1202 10:24:18.729301 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 10:24:18 crc kubenswrapper[4685]: I1202 10:24:18.733955 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 02 10:24:22 crc kubenswrapper[4685]: I1202 10:24:22.536535 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 10:24:22 crc kubenswrapper[4685]: I1202 10:24:22.538123 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 10:24:22 crc kubenswrapper[4685]: I1202 10:24:22.541174 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 02 10:24:22 crc kubenswrapper[4685]: I1202 10:24:22.549995 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 10:24:23 crc kubenswrapper[4685]: I1202 10:24:23.361713 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 02 10:24:23 crc kubenswrapper[4685]: I1202 10:24:23.376414 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 02 10:24:32 crc kubenswrapper[4685]: I1202 10:24:32.427066 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 10:24:33 crc kubenswrapper[4685]: I1202 10:24:33.258767 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 10:24:36 crc kubenswrapper[4685]: I1202 10:24:36.670265 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" 
podUID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" containerName="rabbitmq" containerID="cri-o://567e4b57d73123f06069c5a2696b211119e19f6aaf789819f4e76abe39df067b" gracePeriod=604796 Dec 02 10:24:37 crc kubenswrapper[4685]: I1202 10:24:37.917094 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" containerName="rabbitmq" containerID="cri-o://1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c" gracePeriod=604796 Dec 02 10:24:43 crc kubenswrapper[4685]: I1202 10:24:43.543193 4685 generic.go:334] "Generic (PLEG): container finished" podID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" containerID="567e4b57d73123f06069c5a2696b211119e19f6aaf789819f4e76abe39df067b" exitCode=0 Dec 02 10:24:43 crc kubenswrapper[4685]: I1202 10:24:43.543269 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"72a08bf5-82d6-48e0-a188-c7ac46ba22b4","Type":"ContainerDied","Data":"567e4b57d73123f06069c5a2696b211119e19f6aaf789819f4e76abe39df067b"} Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.201090 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.360924 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-erlang-cookie\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.364284 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-plugins-conf\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.364947 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc2sl\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-kube-api-access-fc2sl\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.365002 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-plugins\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.365045 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-config-data\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.366725 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.366783 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" 
(UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-confd\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.366810 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-erlang-cookie-secret\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.366832 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-tls\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.366869 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-pod-info\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.366907 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-server-conf\") pod \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\" (UID: \"72a08bf5-82d6-48e0-a188-c7ac46ba22b4\") " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.370381 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.371484 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.372265 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.380593 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-kube-api-access-fc2sl" (OuterVolumeSpecName: "kube-api-access-fc2sl") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "kube-api-access-fc2sl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.380695 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.385865 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-pod-info" (OuterVolumeSpecName: "pod-info") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.400389 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.401746 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-config-data" (OuterVolumeSpecName: "config-data") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.456932 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.470074 4685 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.470114 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.470156 4685 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.470172 4685 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.470188 4685 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.470199 4685 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-pod-info\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.470240 4685 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.470252 4685 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.470264 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc2sl\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-kube-api-access-fc2sl\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.480085 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-server-conf" (OuterVolumeSpecName: "server-conf") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.498704 4685 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.518915 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "72a08bf5-82d6-48e0-a188-c7ac46ba22b4" (UID: "72a08bf5-82d6-48e0-a188-c7ac46ba22b4"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.558664 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"72a08bf5-82d6-48e0-a188-c7ac46ba22b4","Type":"ContainerDied","Data":"cdbdb1a50e3641169eab39aaa66f8dd8d62adc9f36631638934552e7a65f81ef"} Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.558715 4685 scope.go:117] "RemoveContainer" containerID="567e4b57d73123f06069c5a2696b211119e19f6aaf789819f4e76abe39df067b" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.558923 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.572753 4685 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-server-conf\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.573415 4685 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.573444 4685 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72a08bf5-82d6-48e0-a188-c7ac46ba22b4-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.589702 4685 scope.go:117] "RemoveContainer" containerID="ab362a7a28d7c14f4bc121a5fe40bd1d254d7dfc824d5973af3c51bd6cc6e44b" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.614501 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.631518 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.658743 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 10:24:44 crc kubenswrapper[4685]: E1202 10:24:44.659179 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" containerName="setup-container" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.659198 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" containerName="setup-container" Dec 02 10:24:44 crc kubenswrapper[4685]: E1202 10:24:44.659237 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" containerName="rabbitmq" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.659242 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" containerName="rabbitmq" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.659457 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" containerName="rabbitmq" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.660456 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.664519 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.664701 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.664919 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-nkmv4" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.664946 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.665005 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.665039 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.665115 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.691962 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.778338 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.778711 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.778738 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hktnl\" (UniqueName: \"kubernetes.io/projected/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-kube-api-access-hktnl\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.778832 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.779248 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.779328 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-config-data\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.779374 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.779474 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.779552 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.779604 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.779627 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881548 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881622 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-config-data\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881655 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881705 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: 
\"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881729 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881750 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881772 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881813 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881834 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881849 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hktnl\" (UniqueName: \"kubernetes.io/projected/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-kube-api-access-hktnl\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.881875 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.882897 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.883359 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.886183 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/configmap/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-config-data\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.887479 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.887500 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.890764 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.895124 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.897716 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.901378 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.912923 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hktnl\" (UniqueName: \"kubernetes.io/projected/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-kube-api-access-hktnl\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.918246 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/622e7d58-a7d5-4898-a94b-ac66e3d0ee7f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.923277 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.953860 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f\") " pod="openstack/rabbitmq-server-0" Dec 02 10:24:44 crc kubenswrapper[4685]: I1202 10:24:44.988464 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.091292 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-server-conf\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.091346 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-erlang-cookie-secret\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.091405 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-tls\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.091692 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-pod-info\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.091850 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-confd\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.092184 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqlst\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-kube-api-access-fqlst\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.092236 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-erlang-cookie\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.092265 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-plugins\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.092299 4685 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.092323 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-config-data\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.092357 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-plugins-conf\") pod \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\" (UID: \"e0366597-4ac4-482d-ba5f-dfa2956d1fb3\") " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.092844 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.092960 4685 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.093296 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.093843 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.097518 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-pod-info" (OuterVolumeSpecName: "pod-info") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.097514 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-kube-api-access-fqlst" (OuterVolumeSpecName: "kube-api-access-fqlst") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "kube-api-access-fqlst". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.098660 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.100421 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.103262 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.172475 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-config-data" (OuterVolumeSpecName: "config-data") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.202850 4685 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.202895 4685 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.202915 4685 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-pod-info\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.202927 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqlst\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-kube-api-access-fqlst\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.202938 4685 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.202982 4685 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.202996 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.203008 4685 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.249774 4685 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.267057 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-server-conf" (OuterVolumeSpecName: "server-conf") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.305113 4685 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.305147 4685 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-server-conf\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.330018 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e0366597-4ac4-482d-ba5f-dfa2956d1fb3" (UID: "e0366597-4ac4-482d-ba5f-dfa2956d1fb3"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.406749 4685 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e0366597-4ac4-482d-ba5f-dfa2956d1fb3-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.573135 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 02 10:24:45 crc kubenswrapper[4685]: W1202 10:24:45.573178 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod622e7d58_a7d5_4898_a94b_ac66e3d0ee7f.slice/crio-10869e9098e545f065be6a107df69456834840ee266d6c316c22283f22395619 WatchSource:0}: Error finding container 10869e9098e545f065be6a107df69456834840ee266d6c316c22283f22395619: Status 404 returned error can't find the container with id 10869e9098e545f065be6a107df69456834840ee266d6c316c22283f22395619 Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.573407 4685 generic.go:334] "Generic (PLEG): container finished" podID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" containerID="1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c" exitCode=0 Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.573447 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e0366597-4ac4-482d-ba5f-dfa2956d1fb3","Type":"ContainerDied","Data":"1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c"} Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.573477 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e0366597-4ac4-482d-ba5f-dfa2956d1fb3","Type":"ContainerDied","Data":"f7a749d9e3b8907be762a78cfff654b9fbd673229705469e448ef1126e2d98a1"} Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.573494 4685 scope.go:117] "RemoveContainer" containerID="1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.573650 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.620992 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.629034 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.647348 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 10:24:45 crc kubenswrapper[4685]: E1202 10:24:45.647878 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" containerName="rabbitmq" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.647894 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" containerName="rabbitmq" Dec 02 10:24:45 crc kubenswrapper[4685]: E1202 10:24:45.647926 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" containerName="setup-container" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.647933 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" containerName="setup-container" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.648096 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" containerName="rabbitmq" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.650180 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.654770 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.655128 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.655423 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-96wnm" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.655612 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.655733 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.655872 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.655762 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.663276 4685 scope.go:117] "RemoveContainer" containerID="b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.683030 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.699726 4685 scope.go:117] "RemoveContainer" containerID="1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c" Dec 02 10:24:45 crc kubenswrapper[4685]: E1202 10:24:45.705682 4685 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c\": container with ID starting with 1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c not found: ID does not exist" containerID="1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.705825 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c"} err="failed to get container status \"1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c\": rpc error: code = NotFound desc = could not find container \"1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c\": container with ID starting with 1d7732d62cdef93949a49e81e672e7e67baa5fa9e0ac34eafee902895083078c not found: ID does not exist" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.705946 4685 scope.go:117] "RemoveContainer" containerID="b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871" Dec 02 10:24:45 crc kubenswrapper[4685]: E1202 10:24:45.706743 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871\": container with ID starting with b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871 not found: ID does not exist" containerID="b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.706793 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871"} err="failed to get container status \"b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871\": rpc error: code = NotFound desc = could not find container \"b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871\": container with ID starting with b4c22b849dfb027c17ab59a2893115f10b52831ff54592d26ffa34faf5bcb871 not found: ID does not exist" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.816792 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/79bd4baf-4a03-43d7-8b3b-3a632474694e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.816856 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/79bd4baf-4a03-43d7-8b3b-3a632474694e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.816903 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/79bd4baf-4a03-43d7-8b3b-3a632474694e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.816932 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.816961 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.817175 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.817469 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/79bd4baf-4a03-43d7-8b3b-3a632474694e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.817582 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.817713 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.817861 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/79bd4baf-4a03-43d7-8b3b-3a632474694e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.817960 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq4bd\" (UniqueName: \"kubernetes.io/projected/79bd4baf-4a03-43d7-8b3b-3a632474694e-kube-api-access-pq4bd\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.912985 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72a08bf5-82d6-48e0-a188-c7ac46ba22b4" path="/var/lib/kubelet/pods/72a08bf5-82d6-48e0-a188-c7ac46ba22b4/volumes" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.914876 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0366597-4ac4-482d-ba5f-dfa2956d1fb3" path="/var/lib/kubelet/pods/e0366597-4ac4-482d-ba5f-dfa2956d1fb3/volumes" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.919886 
4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.920383 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.920421 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.920590 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/79bd4baf-4a03-43d7-8b3b-3a632474694e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.920772 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.920851 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.920921 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/79bd4baf-4a03-43d7-8b3b-3a632474694e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.921075 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq4bd\" (UniqueName: \"kubernetes.io/projected/79bd4baf-4a03-43d7-8b3b-3a632474694e-kube-api-access-pq4bd\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.921175 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/79bd4baf-4a03-43d7-8b3b-3a632474694e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.921246 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/79bd4baf-4a03-43d7-8b3b-3a632474694e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.921298 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/79bd4baf-4a03-43d7-8b3b-3a632474694e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.921345 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.921665 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.921957 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.922304 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/79bd4baf-4a03-43d7-8b3b-3a632474694e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.922963 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/79bd4baf-4a03-43d7-8b3b-3a632474694e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.923049 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/79bd4baf-4a03-43d7-8b3b-3a632474694e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.926548 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/79bd4baf-4a03-43d7-8b3b-3a632474694e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.927308 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.927368 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/79bd4baf-4a03-43d7-8b3b-3a632474694e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.928590 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/79bd4baf-4a03-43d7-8b3b-3a632474694e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.945253 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq4bd\" (UniqueName: \"kubernetes.io/projected/79bd4baf-4a03-43d7-8b3b-3a632474694e-kube-api-access-pq4bd\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.970136 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"79bd4baf-4a03-43d7-8b3b-3a632474694e\") " pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:45 crc kubenswrapper[4685]: I1202 10:24:45.973443 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.492253 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 02 10:24:46 crc kubenswrapper[4685]: W1202 10:24:46.509154 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod79bd4baf_4a03_43d7_8b3b_3a632474694e.slice/crio-38a4a61ef9b216866c002644f8facf863386e37ce4573441bbcdc920d85dbd85 WatchSource:0}: Error finding container 38a4a61ef9b216866c002644f8facf863386e37ce4573441bbcdc920d85dbd85: Status 404 returned error can't find the container with id 38a4a61ef9b216866c002644f8facf863386e37ce4573441bbcdc920d85dbd85 Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.589988 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f","Type":"ContainerStarted","Data":"10869e9098e545f065be6a107df69456834840ee266d6c316c22283f22395619"} Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.591802 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"79bd4baf-4a03-43d7-8b3b-3a632474694e","Type":"ContainerStarted","Data":"38a4a61ef9b216866c002644f8facf863386e37ce4573441bbcdc920d85dbd85"} Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.677710 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-xnnp9"] Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.679904 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.686013 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.701715 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-xnnp9"] Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.840769 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.840807 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.840850 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.840918 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-config\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.840977 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.841013 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.841058 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5q95\" (UniqueName: \"kubernetes.io/projected/ed560052-d1d5-4d21-a0da-2c4167219b7c-kube-api-access-d5q95\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.942418 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: 
\"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.943517 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.943649 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.943876 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.944611 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.944748 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.944906 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-config\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.945686 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-config\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.946815 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.947523 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc 
kubenswrapper[4685]: I1202 10:24:46.947732 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.948623 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.948829 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5q95\" (UniqueName: \"kubernetes.io/projected/ed560052-d1d5-4d21-a0da-2c4167219b7c-kube-api-access-d5q95\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:46 crc kubenswrapper[4685]: I1202 10:24:46.973391 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5q95\" (UniqueName: \"kubernetes.io/projected/ed560052-d1d5-4d21-a0da-2c4167219b7c-kube-api-access-d5q95\") pod \"dnsmasq-dns-79bd4cc8c9-xnnp9\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:47 crc kubenswrapper[4685]: I1202 10:24:47.085753 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:47 crc kubenswrapper[4685]: I1202 10:24:47.563731 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-xnnp9"] Dec 02 10:24:47 crc kubenswrapper[4685]: I1202 10:24:47.607307 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" event={"ID":"ed560052-d1d5-4d21-a0da-2c4167219b7c","Type":"ContainerStarted","Data":"854ca6ddf5358bee04f662ffc9a5151d23b2160ab9527b7ad7b9fbcb91057493"} Dec 02 10:24:47 crc kubenswrapper[4685]: I1202 10:24:47.609473 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f","Type":"ContainerStarted","Data":"c36b4ba5c7aada3e980728dce0e87ade476ee212d25ae2c04072f15be345c063"} Dec 02 10:24:48 crc kubenswrapper[4685]: I1202 10:24:48.618074 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"79bd4baf-4a03-43d7-8b3b-3a632474694e","Type":"ContainerStarted","Data":"e34704ad90471e555b9d366a6ca4411514dea6dc49d026a7b7697e77a6f6fe43"} Dec 02 10:24:48 crc kubenswrapper[4685]: I1202 10:24:48.619309 4685 generic.go:334] "Generic (PLEG): container finished" podID="ed560052-d1d5-4d21-a0da-2c4167219b7c" containerID="1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a" exitCode=0 Dec 02 10:24:48 crc kubenswrapper[4685]: I1202 10:24:48.619388 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" event={"ID":"ed560052-d1d5-4d21-a0da-2c4167219b7c","Type":"ContainerDied","Data":"1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a"} Dec 02 10:24:50 crc kubenswrapper[4685]: I1202 10:24:50.638890 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" event={"ID":"ed560052-d1d5-4d21-a0da-2c4167219b7c","Type":"ContainerStarted","Data":"2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e"} Dec 02 10:24:50 crc kubenswrapper[4685]: I1202 10:24:50.639469 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:50 crc kubenswrapper[4685]: I1202 10:24:50.670399 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" podStartSLOduration=4.670357276 podStartE2EDuration="4.670357276s" podCreationTimestamp="2025-12-02 10:24:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:24:50.660402618 +0000 UTC m=+1383.032176792" watchObservedRunningTime="2025-12-02 10:24:50.670357276 +0000 UTC m=+1383.042131440" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.087911 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.167894 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6wkt5"] Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.168129 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" podUID="8a3422f7-351c-480e-8d7e-abd5ba07962f" containerName="dnsmasq-dns" containerID="cri-o://5d226dd264a1d2858facd3ce1ee5f5dfe665c66e1610a345b76e238e053dd5f7" gracePeriod=10 Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.338866 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cd9bffc9-gpxbh"] Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.340702 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.359657 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cd9bffc9-gpxbh"] Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.461632 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-ovsdbserver-nb\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.461677 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txzkn\" (UniqueName: \"kubernetes.io/projected/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-kube-api-access-txzkn\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.461697 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-config\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.461766 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-dns-swift-storage-0\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.461801 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-ovsdbserver-sb\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.461825 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-dns-svc\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.461853 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-openstack-edpm-ipam\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.563867 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-openstack-edpm-ipam\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.564006 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-ovsdbserver-nb\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.564031 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txzkn\" (UniqueName: \"kubernetes.io/projected/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-kube-api-access-txzkn\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.564050 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-config\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.564079 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-dns-swift-storage-0\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.564111 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-ovsdbserver-sb\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.564136 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-dns-svc\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.565075 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-config\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.565103 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-ovsdbserver-nb\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.565315 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-dns-svc\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.565365 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-ovsdbserver-sb\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.565421 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-dns-swift-storage-0\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.565608 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-openstack-edpm-ipam\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.593504 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txzkn\" (UniqueName: \"kubernetes.io/projected/7e14e196-7cc2-49ba-8fd9-fdafa0c0727d-kube-api-access-txzkn\") pod \"dnsmasq-dns-6cd9bffc9-gpxbh\" (UID: \"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d\") " pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.667445 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.706922 4685 generic.go:334] "Generic (PLEG): container finished" podID="8a3422f7-351c-480e-8d7e-abd5ba07962f" containerID="5d226dd264a1d2858facd3ce1ee5f5dfe665c66e1610a345b76e238e053dd5f7" exitCode=0 Dec 02 10:24:57 crc kubenswrapper[4685]: I1202 10:24:57.706962 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" event={"ID":"8a3422f7-351c-480e-8d7e-abd5ba07962f","Type":"ContainerDied","Data":"5d226dd264a1d2858facd3ce1ee5f5dfe665c66e1610a345b76e238e053dd5f7"} Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.158933 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.177437 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cd9bffc9-gpxbh"] Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.277758 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5n7nk\" (UniqueName: \"kubernetes.io/projected/8a3422f7-351c-480e-8d7e-abd5ba07962f-kube-api-access-5n7nk\") pod \"8a3422f7-351c-480e-8d7e-abd5ba07962f\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.277801 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-swift-storage-0\") pod \"8a3422f7-351c-480e-8d7e-abd5ba07962f\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.277951 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-svc\") pod \"8a3422f7-351c-480e-8d7e-abd5ba07962f\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.277983 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-nb\") pod \"8a3422f7-351c-480e-8d7e-abd5ba07962f\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.278090 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-config\") pod \"8a3422f7-351c-480e-8d7e-abd5ba07962f\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.278123 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-sb\") pod \"8a3422f7-351c-480e-8d7e-abd5ba07962f\" (UID: \"8a3422f7-351c-480e-8d7e-abd5ba07962f\") " Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.301402 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a3422f7-351c-480e-8d7e-abd5ba07962f-kube-api-access-5n7nk" (OuterVolumeSpecName: "kube-api-access-5n7nk") pod "8a3422f7-351c-480e-8d7e-abd5ba07962f" (UID: "8a3422f7-351c-480e-8d7e-abd5ba07962f"). InnerVolumeSpecName "kube-api-access-5n7nk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.351030 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-config" (OuterVolumeSpecName: "config") pod "8a3422f7-351c-480e-8d7e-abd5ba07962f" (UID: "8a3422f7-351c-480e-8d7e-abd5ba07962f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.364785 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8a3422f7-351c-480e-8d7e-abd5ba07962f" (UID: "8a3422f7-351c-480e-8d7e-abd5ba07962f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.375244 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8a3422f7-351c-480e-8d7e-abd5ba07962f" (UID: "8a3422f7-351c-480e-8d7e-abd5ba07962f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.378954 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8a3422f7-351c-480e-8d7e-abd5ba07962f" (UID: "8a3422f7-351c-480e-8d7e-abd5ba07962f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.380327 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.380352 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.380366 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5n7nk\" (UniqueName: \"kubernetes.io/projected/8a3422f7-351c-480e-8d7e-abd5ba07962f-kube-api-access-5n7nk\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.380377 4685 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.380387 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.385189 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8a3422f7-351c-480e-8d7e-abd5ba07962f" (UID: "8a3422f7-351c-480e-8d7e-abd5ba07962f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.482763 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a3422f7-351c-480e-8d7e-abd5ba07962f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.725777 4685 generic.go:334] "Generic (PLEG): container finished" podID="7e14e196-7cc2-49ba-8fd9-fdafa0c0727d" containerID="ad6496c0e2bfdbe5a75bebc88c7fdfc6e222ff897bbb02f32d1f8a8c0dcaea92" exitCode=0 Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.725900 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" event={"ID":"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d","Type":"ContainerDied","Data":"ad6496c0e2bfdbe5a75bebc88c7fdfc6e222ff897bbb02f32d1f8a8c0dcaea92"} Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.725949 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" event={"ID":"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d","Type":"ContainerStarted","Data":"fbdcc5f0a0ced13248dd8f8882ca6440599db74a8fe215de54aaad1fe79ab83e"} Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.734869 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" event={"ID":"8a3422f7-351c-480e-8d7e-abd5ba07962f","Type":"ContainerDied","Data":"41b8ce480bb52f87c8ecbec5f4377cb52948ddde5509795d96db2a779b47748d"} Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.734940 4685 scope.go:117] "RemoveContainer" containerID="5d226dd264a1d2858facd3ce1ee5f5dfe665c66e1610a345b76e238e053dd5f7" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.735117 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6wkt5" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.871591 4685 scope.go:117] "RemoveContainer" containerID="05202a5c7d5bddff79fde775886a1ffca7cd47e9ba4cbaf1382b575516d173a3" Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.895576 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6wkt5"] Dec 02 10:24:58 crc kubenswrapper[4685]: I1202 10:24:58.907578 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6wkt5"] Dec 02 10:24:59 crc kubenswrapper[4685]: I1202 10:24:59.750961 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" event={"ID":"7e14e196-7cc2-49ba-8fd9-fdafa0c0727d","Type":"ContainerStarted","Data":"ea79e48214545aad25ce1097fef376f10633fcae3bed658c97479e3c7af77d0b"} Dec 02 10:24:59 crc kubenswrapper[4685]: I1202 10:24:59.751279 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:24:59 crc kubenswrapper[4685]: I1202 10:24:59.779773 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" podStartSLOduration=2.779751828 podStartE2EDuration="2.779751828s" podCreationTimestamp="2025-12-02 10:24:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:24:59.773171281 +0000 UTC m=+1392.144945435" watchObservedRunningTime="2025-12-02 10:24:59.779751828 +0000 UTC m=+1392.151525992" Dec 02 10:24:59 crc kubenswrapper[4685]: I1202 10:24:59.913258 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a3422f7-351c-480e-8d7e-abd5ba07962f" path="/var/lib/kubelet/pods/8a3422f7-351c-480e-8d7e-abd5ba07962f/volumes" Dec 02 10:25:07 crc kubenswrapper[4685]: I1202 10:25:07.669338 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6cd9bffc9-gpxbh" Dec 02 10:25:07 crc kubenswrapper[4685]: I1202 10:25:07.737365 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-xnnp9"] Dec 02 10:25:07 crc kubenswrapper[4685]: I1202 10:25:07.737633 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" podUID="ed560052-d1d5-4d21-a0da-2c4167219b7c" containerName="dnsmasq-dns" containerID="cri-o://2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e" gracePeriod=10 Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.179400 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.295822 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-openstack-edpm-ipam\") pod \"ed560052-d1d5-4d21-a0da-2c4167219b7c\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.295983 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-config\") pod \"ed560052-d1d5-4d21-a0da-2c4167219b7c\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.296156 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-svc\") pod \"ed560052-d1d5-4d21-a0da-2c4167219b7c\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.296222 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5q95\" (UniqueName: \"kubernetes.io/projected/ed560052-d1d5-4d21-a0da-2c4167219b7c-kube-api-access-d5q95\") pod \"ed560052-d1d5-4d21-a0da-2c4167219b7c\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.296300 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-sb\") pod \"ed560052-d1d5-4d21-a0da-2c4167219b7c\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.296388 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-swift-storage-0\") pod \"ed560052-d1d5-4d21-a0da-2c4167219b7c\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.296537 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-nb\") pod \"ed560052-d1d5-4d21-a0da-2c4167219b7c\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.309762 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed560052-d1d5-4d21-a0da-2c4167219b7c-kube-api-access-d5q95" (OuterVolumeSpecName: "kube-api-access-d5q95") pod "ed560052-d1d5-4d21-a0da-2c4167219b7c" (UID: "ed560052-d1d5-4d21-a0da-2c4167219b7c"). InnerVolumeSpecName "kube-api-access-d5q95". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.390736 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ed560052-d1d5-4d21-a0da-2c4167219b7c" (UID: "ed560052-d1d5-4d21-a0da-2c4167219b7c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.395025 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ed560052-d1d5-4d21-a0da-2c4167219b7c" (UID: "ed560052-d1d5-4d21-a0da-2c4167219b7c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.401326 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-config" (OuterVolumeSpecName: "config") pod "ed560052-d1d5-4d21-a0da-2c4167219b7c" (UID: "ed560052-d1d5-4d21-a0da-2c4167219b7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.401434 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-config\") pod \"ed560052-d1d5-4d21-a0da-2c4167219b7c\" (UID: \"ed560052-d1d5-4d21-a0da-2c4167219b7c\") " Dec 02 10:25:08 crc kubenswrapper[4685]: W1202 10:25:08.401496 4685 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/ed560052-d1d5-4d21-a0da-2c4167219b7c/volumes/kubernetes.io~configmap/config Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.401517 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-config" (OuterVolumeSpecName: "config") pod "ed560052-d1d5-4d21-a0da-2c4167219b7c" (UID: "ed560052-d1d5-4d21-a0da-2c4167219b7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.402111 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.402133 4685 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.402142 4685 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.402151 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5q95\" (UniqueName: \"kubernetes.io/projected/ed560052-d1d5-4d21-a0da-2c4167219b7c-kube-api-access-d5q95\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.407217 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "ed560052-d1d5-4d21-a0da-2c4167219b7c" (UID: "ed560052-d1d5-4d21-a0da-2c4167219b7c"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.415039 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ed560052-d1d5-4d21-a0da-2c4167219b7c" (UID: "ed560052-d1d5-4d21-a0da-2c4167219b7c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.422960 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ed560052-d1d5-4d21-a0da-2c4167219b7c" (UID: "ed560052-d1d5-4d21-a0da-2c4167219b7c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.504171 4685 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.504221 4685 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.504238 4685 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ed560052-d1d5-4d21-a0da-2c4167219b7c-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.840318 4685 generic.go:334] "Generic (PLEG): container finished" podID="ed560052-d1d5-4d21-a0da-2c4167219b7c" containerID="2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e" exitCode=0 Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.840369 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" event={"ID":"ed560052-d1d5-4d21-a0da-2c4167219b7c","Type":"ContainerDied","Data":"2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e"} Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.840374 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.840415 4685 scope.go:117] "RemoveContainer" containerID="2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.840401 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-xnnp9" event={"ID":"ed560052-d1d5-4d21-a0da-2c4167219b7c","Type":"ContainerDied","Data":"854ca6ddf5358bee04f662ffc9a5151d23b2160ab9527b7ad7b9fbcb91057493"} Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.860654 4685 scope.go:117] "RemoveContainer" containerID="1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.882703 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-xnnp9"] Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.891647 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-xnnp9"] Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.916997 4685 scope.go:117] "RemoveContainer" containerID="2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e" Dec 02 10:25:08 crc kubenswrapper[4685]: E1202 10:25:08.917423 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e\": container with ID starting with 2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e not found: ID does not exist" containerID="2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.917451 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e"} err="failed to get container status \"2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e\": rpc error: code = NotFound desc = could not find container \"2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e\": container with ID starting with 2a40ac162c4b363a545e0c04629b7d60d46985bcdc2a3cf05e420679a9ff216e not found: ID does not exist" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.917474 4685 scope.go:117] "RemoveContainer" containerID="1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a" Dec 02 10:25:08 crc kubenswrapper[4685]: E1202 10:25:08.917904 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a\": container with ID starting with 1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a not found: ID does not exist" containerID="1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a" Dec 02 10:25:08 crc kubenswrapper[4685]: I1202 10:25:08.917927 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a"} err="failed to get container status \"1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a\": rpc error: code = NotFound desc = could not find container \"1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a\": container with ID starting with 1702d05c3981d3a3801974194302e6ce74aaa79cc1c1c705b8af0a8f3837526a not found: ID does not exist" Dec 02 
10:25:09 crc kubenswrapper[4685]: I1202 10:25:09.909735 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed560052-d1d5-4d21-a0da-2c4167219b7c" path="/var/lib/kubelet/pods/ed560052-d1d5-4d21-a0da-2c4167219b7c/volumes" Dec 02 10:25:19 crc kubenswrapper[4685]: I1202 10:25:19.956409 4685 generic.go:334] "Generic (PLEG): container finished" podID="622e7d58-a7d5-4898-a94b-ac66e3d0ee7f" containerID="c36b4ba5c7aada3e980728dce0e87ade476ee212d25ae2c04072f15be345c063" exitCode=0 Dec 02 10:25:19 crc kubenswrapper[4685]: I1202 10:25:19.956523 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f","Type":"ContainerDied","Data":"c36b4ba5c7aada3e980728dce0e87ade476ee212d25ae2c04072f15be345c063"} Dec 02 10:25:20 crc kubenswrapper[4685]: I1202 10:25:20.967744 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"622e7d58-a7d5-4898-a94b-ac66e3d0ee7f","Type":"ContainerStarted","Data":"51137bb59991003bfcd6f8e0b41ab3656381195ae0da075e9419994bcd1df868"} Dec 02 10:25:20 crc kubenswrapper[4685]: I1202 10:25:20.968291 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 02 10:25:20 crc kubenswrapper[4685]: I1202 10:25:20.971008 4685 generic.go:334] "Generic (PLEG): container finished" podID="79bd4baf-4a03-43d7-8b3b-3a632474694e" containerID="e34704ad90471e555b9d366a6ca4411514dea6dc49d026a7b7697e77a6f6fe43" exitCode=0 Dec 02 10:25:20 crc kubenswrapper[4685]: I1202 10:25:20.971058 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"79bd4baf-4a03-43d7-8b3b-3a632474694e","Type":"ContainerDied","Data":"e34704ad90471e555b9d366a6ca4411514dea6dc49d026a7b7697e77a6f6fe43"} Dec 02 10:25:21 crc kubenswrapper[4685]: I1202 10:25:21.001391 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.00136543 podStartE2EDuration="37.00136543s" podCreationTimestamp="2025-12-02 10:24:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:25:20.994012252 +0000 UTC m=+1413.365786416" watchObservedRunningTime="2025-12-02 10:25:21.00136543 +0000 UTC m=+1413.373139594" Dec 02 10:25:21 crc kubenswrapper[4685]: I1202 10:25:21.981768 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"79bd4baf-4a03-43d7-8b3b-3a632474694e","Type":"ContainerStarted","Data":"0da49f01e5df783cdb18f93cbd23a0f599200dbc2c60055453ca537db363bdc6"} Dec 02 10:25:21 crc kubenswrapper[4685]: I1202 10:25:21.982323 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:25:22 crc kubenswrapper[4685]: I1202 10:25:22.021606 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.02158111 podStartE2EDuration="37.02158111s" podCreationTimestamp="2025-12-02 10:24:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 10:25:22.006243758 +0000 UTC m=+1414.378017942" watchObservedRunningTime="2025-12-02 10:25:22.02158111 +0000 UTC m=+1414.393355274" Dec 02 10:25:25 crc kubenswrapper[4685]: I1202 10:25:25.861016 4685 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/community-operators-pjtvl"] Dec 02 10:25:25 crc kubenswrapper[4685]: E1202 10:25:25.862018 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a3422f7-351c-480e-8d7e-abd5ba07962f" containerName="dnsmasq-dns" Dec 02 10:25:25 crc kubenswrapper[4685]: I1202 10:25:25.862035 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a3422f7-351c-480e-8d7e-abd5ba07962f" containerName="dnsmasq-dns" Dec 02 10:25:25 crc kubenswrapper[4685]: E1202 10:25:25.862052 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed560052-d1d5-4d21-a0da-2c4167219b7c" containerName="init" Dec 02 10:25:25 crc kubenswrapper[4685]: I1202 10:25:25.862060 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed560052-d1d5-4d21-a0da-2c4167219b7c" containerName="init" Dec 02 10:25:25 crc kubenswrapper[4685]: E1202 10:25:25.862073 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a3422f7-351c-480e-8d7e-abd5ba07962f" containerName="init" Dec 02 10:25:25 crc kubenswrapper[4685]: I1202 10:25:25.862081 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a3422f7-351c-480e-8d7e-abd5ba07962f" containerName="init" Dec 02 10:25:25 crc kubenswrapper[4685]: E1202 10:25:25.862092 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed560052-d1d5-4d21-a0da-2c4167219b7c" containerName="dnsmasq-dns" Dec 02 10:25:25 crc kubenswrapper[4685]: I1202 10:25:25.862100 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed560052-d1d5-4d21-a0da-2c4167219b7c" containerName="dnsmasq-dns" Dec 02 10:25:25 crc kubenswrapper[4685]: I1202 10:25:25.862327 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed560052-d1d5-4d21-a0da-2c4167219b7c" containerName="dnsmasq-dns" Dec 02 10:25:25 crc kubenswrapper[4685]: I1202 10:25:25.862359 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a3422f7-351c-480e-8d7e-abd5ba07962f" containerName="dnsmasq-dns" Dec 02 10:25:25 crc kubenswrapper[4685]: I1202 10:25:25.867643 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:25 crc kubenswrapper[4685]: I1202 10:25:25.877568 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pjtvl"] Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.027503 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9vdf\" (UniqueName: \"kubernetes.io/projected/500bc619-f381-4f27-8a22-ed512d9fff8d-kube-api-access-h9vdf\") pod \"community-operators-pjtvl\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.027852 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-catalog-content\") pod \"community-operators-pjtvl\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.027964 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-utilities\") pod \"community-operators-pjtvl\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.130076 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9vdf\" (UniqueName: \"kubernetes.io/projected/500bc619-f381-4f27-8a22-ed512d9fff8d-kube-api-access-h9vdf\") pod \"community-operators-pjtvl\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.130685 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-catalog-content\") pod \"community-operators-pjtvl\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.130820 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-utilities\") pod \"community-operators-pjtvl\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.131191 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-catalog-content\") pod \"community-operators-pjtvl\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.131598 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-utilities\") pod \"community-operators-pjtvl\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.149690 4685 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp"] Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.151362 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.155140 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9vdf\" (UniqueName: \"kubernetes.io/projected/500bc619-f381-4f27-8a22-ed512d9fff8d-kube-api-access-h9vdf\") pod \"community-operators-pjtvl\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.162403 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp"] Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.169402 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.170488 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.170919 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.171187 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.189043 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.346424 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcgpz\" (UniqueName: \"kubernetes.io/projected/4c22e2d6-7984-4d74-9202-ea57de627392-kube-api-access-gcgpz\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.347442 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.347546 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.347616 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: 
\"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.449736 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.449816 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.449943 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcgpz\" (UniqueName: \"kubernetes.io/projected/4c22e2d6-7984-4d74-9202-ea57de627392-kube-api-access-gcgpz\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.449989 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.461660 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.461932 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.472425 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcgpz\" (UniqueName: \"kubernetes.io/projected/4c22e2d6-7984-4d74-9202-ea57de627392-kube-api-access-gcgpz\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.472974 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " 
pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.633378 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:26 crc kubenswrapper[4685]: I1202 10:25:26.754759 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pjtvl"] Dec 02 10:25:27 crc kubenswrapper[4685]: I1202 10:25:27.034452 4685 generic.go:334] "Generic (PLEG): container finished" podID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerID="da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1" exitCode=0 Dec 02 10:25:27 crc kubenswrapper[4685]: I1202 10:25:27.034494 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjtvl" event={"ID":"500bc619-f381-4f27-8a22-ed512d9fff8d","Type":"ContainerDied","Data":"da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1"} Dec 02 10:25:27 crc kubenswrapper[4685]: I1202 10:25:27.034740 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjtvl" event={"ID":"500bc619-f381-4f27-8a22-ed512d9fff8d","Type":"ContainerStarted","Data":"2f2f0a0cc40f9d1861f0c63eb509b3a7ff091261a60c2bb5af286dd0cd591d0d"} Dec 02 10:25:27 crc kubenswrapper[4685]: I1202 10:25:27.295477 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp"] Dec 02 10:25:28 crc kubenswrapper[4685]: I1202 10:25:28.048239 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" event={"ID":"4c22e2d6-7984-4d74-9202-ea57de627392","Type":"ContainerStarted","Data":"0fa8f25a979c00cc95412445b8a1109918a548fce7b91a062f274d75d6615db1"} Dec 02 10:25:29 crc kubenswrapper[4685]: I1202 10:25:29.063049 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjtvl" event={"ID":"500bc619-f381-4f27-8a22-ed512d9fff8d","Type":"ContainerStarted","Data":"1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e"} Dec 02 10:25:30 crc kubenswrapper[4685]: I1202 10:25:30.076446 4685 generic.go:334] "Generic (PLEG): container finished" podID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerID="1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e" exitCode=0 Dec 02 10:25:30 crc kubenswrapper[4685]: I1202 10:25:30.076681 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjtvl" event={"ID":"500bc619-f381-4f27-8a22-ed512d9fff8d","Type":"ContainerDied","Data":"1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e"} Dec 02 10:25:34 crc kubenswrapper[4685]: I1202 10:25:34.995770 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 02 10:25:35 crc kubenswrapper[4685]: I1202 10:25:35.977229 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 02 10:25:40 crc kubenswrapper[4685]: I1202 10:25:40.206023 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjtvl" event={"ID":"500bc619-f381-4f27-8a22-ed512d9fff8d","Type":"ContainerStarted","Data":"0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562"} Dec 02 10:25:40 crc kubenswrapper[4685]: I1202 10:25:40.207817 4685 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" event={"ID":"4c22e2d6-7984-4d74-9202-ea57de627392","Type":"ContainerStarted","Data":"5984f2257fec3afdd12ed08fb9510fbc1a9afbfc8427b74e03d7fbe47a6170a0"} Dec 02 10:25:40 crc kubenswrapper[4685]: I1202 10:25:40.237876 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pjtvl" podStartSLOduration=3.098480447 podStartE2EDuration="15.237854296s" podCreationTimestamp="2025-12-02 10:25:25 +0000 UTC" firstStartedPulling="2025-12-02 10:25:27.038669977 +0000 UTC m=+1419.410444131" lastFinishedPulling="2025-12-02 10:25:39.178043826 +0000 UTC m=+1431.549817980" observedRunningTime="2025-12-02 10:25:40.227517381 +0000 UTC m=+1432.599291565" watchObservedRunningTime="2025-12-02 10:25:40.237854296 +0000 UTC m=+1432.609628450" Dec 02 10:25:40 crc kubenswrapper[4685]: I1202 10:25:40.251879 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" podStartSLOduration=2.348044394 podStartE2EDuration="14.251856902s" podCreationTimestamp="2025-12-02 10:25:26 +0000 UTC" firstStartedPulling="2025-12-02 10:25:27.297774696 +0000 UTC m=+1419.669548850" lastFinishedPulling="2025-12-02 10:25:39.201587204 +0000 UTC m=+1431.573361358" observedRunningTime="2025-12-02 10:25:40.250997827 +0000 UTC m=+1432.622771981" watchObservedRunningTime="2025-12-02 10:25:40.251856902 +0000 UTC m=+1432.623631056" Dec 02 10:25:46 crc kubenswrapper[4685]: I1202 10:25:46.190619 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:46 crc kubenswrapper[4685]: I1202 10:25:46.191735 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:46 crc kubenswrapper[4685]: I1202 10:25:46.236002 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:46 crc kubenswrapper[4685]: I1202 10:25:46.320377 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:46 crc kubenswrapper[4685]: I1202 10:25:46.479309 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pjtvl"] Dec 02 10:25:48 crc kubenswrapper[4685]: I1202 10:25:48.286835 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pjtvl" podUID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerName="registry-server" containerID="cri-o://0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562" gracePeriod=2 Dec 02 10:25:48 crc kubenswrapper[4685]: I1202 10:25:48.773208 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:48 crc kubenswrapper[4685]: I1202 10:25:48.915009 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-catalog-content\") pod \"500bc619-f381-4f27-8a22-ed512d9fff8d\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " Dec 02 10:25:48 crc kubenswrapper[4685]: I1202 10:25:48.915297 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-utilities\") pod \"500bc619-f381-4f27-8a22-ed512d9fff8d\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " Dec 02 10:25:48 crc kubenswrapper[4685]: I1202 10:25:48.915500 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9vdf\" (UniqueName: \"kubernetes.io/projected/500bc619-f381-4f27-8a22-ed512d9fff8d-kube-api-access-h9vdf\") pod \"500bc619-f381-4f27-8a22-ed512d9fff8d\" (UID: \"500bc619-f381-4f27-8a22-ed512d9fff8d\") " Dec 02 10:25:48 crc kubenswrapper[4685]: I1202 10:25:48.916118 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-utilities" (OuterVolumeSpecName: "utilities") pod "500bc619-f381-4f27-8a22-ed512d9fff8d" (UID: "500bc619-f381-4f27-8a22-ed512d9fff8d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:25:48 crc kubenswrapper[4685]: I1202 10:25:48.916811 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:48 crc kubenswrapper[4685]: I1202 10:25:48.920685 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/500bc619-f381-4f27-8a22-ed512d9fff8d-kube-api-access-h9vdf" (OuterVolumeSpecName: "kube-api-access-h9vdf") pod "500bc619-f381-4f27-8a22-ed512d9fff8d" (UID: "500bc619-f381-4f27-8a22-ed512d9fff8d"). InnerVolumeSpecName "kube-api-access-h9vdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:25:48 crc kubenswrapper[4685]: I1202 10:25:48.961123 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "500bc619-f381-4f27-8a22-ed512d9fff8d" (UID: "500bc619-f381-4f27-8a22-ed512d9fff8d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.018159 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9vdf\" (UniqueName: \"kubernetes.io/projected/500bc619-f381-4f27-8a22-ed512d9fff8d-kube-api-access-h9vdf\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.018386 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/500bc619-f381-4f27-8a22-ed512d9fff8d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.298115 4685 generic.go:334] "Generic (PLEG): container finished" podID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerID="0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562" exitCode=0 Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.298214 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pjtvl" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.298249 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjtvl" event={"ID":"500bc619-f381-4f27-8a22-ed512d9fff8d","Type":"ContainerDied","Data":"0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562"} Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.299401 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjtvl" event={"ID":"500bc619-f381-4f27-8a22-ed512d9fff8d","Type":"ContainerDied","Data":"2f2f0a0cc40f9d1861f0c63eb509b3a7ff091261a60c2bb5af286dd0cd591d0d"} Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.299423 4685 scope.go:117] "RemoveContainer" containerID="0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.338199 4685 scope.go:117] "RemoveContainer" containerID="1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.349114 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pjtvl"] Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.391757 4685 scope.go:117] "RemoveContainer" containerID="da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.395567 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pjtvl"] Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.422341 4685 scope.go:117] "RemoveContainer" containerID="0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562" Dec 02 10:25:49 crc kubenswrapper[4685]: E1202 10:25:49.422827 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562\": container with ID starting with 0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562 not found: ID does not exist" containerID="0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.422864 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562"} err="failed to get container status 
\"0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562\": rpc error: code = NotFound desc = could not find container \"0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562\": container with ID starting with 0a98432be91e937ae5b41e0c0f5f621bf810baaa1789cb597f68c282a3b08562 not found: ID does not exist" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.422888 4685 scope.go:117] "RemoveContainer" containerID="1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e" Dec 02 10:25:49 crc kubenswrapper[4685]: E1202 10:25:49.423259 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e\": container with ID starting with 1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e not found: ID does not exist" containerID="1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.423327 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e"} err="failed to get container status \"1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e\": rpc error: code = NotFound desc = could not find container \"1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e\": container with ID starting with 1be6af8462fd3bb888779d9bac08c6c6d85184c5fe7d9ab9dbc238c699c65c7e not found: ID does not exist" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.423364 4685 scope.go:117] "RemoveContainer" containerID="da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1" Dec 02 10:25:49 crc kubenswrapper[4685]: E1202 10:25:49.423856 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1\": container with ID starting with da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1 not found: ID does not exist" containerID="da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.423886 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1"} err="failed to get container status \"da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1\": rpc error: code = NotFound desc = could not find container \"da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1\": container with ID starting with da1f2cfd581e3e60fb293963849451f4a435c7d147f6abb8f6a5eecd6ebb47f1 not found: ID does not exist" Dec 02 10:25:49 crc kubenswrapper[4685]: I1202 10:25:49.909760 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="500bc619-f381-4f27-8a22-ed512d9fff8d" path="/var/lib/kubelet/pods/500bc619-f381-4f27-8a22-ed512d9fff8d/volumes" Dec 02 10:25:51 crc kubenswrapper[4685]: I1202 10:25:51.318174 4685 generic.go:334] "Generic (PLEG): container finished" podID="4c22e2d6-7984-4d74-9202-ea57de627392" containerID="5984f2257fec3afdd12ed08fb9510fbc1a9afbfc8427b74e03d7fbe47a6170a0" exitCode=0 Dec 02 10:25:51 crc kubenswrapper[4685]: I1202 10:25:51.318261 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" 
event={"ID":"4c22e2d6-7984-4d74-9202-ea57de627392","Type":"ContainerDied","Data":"5984f2257fec3afdd12ed08fb9510fbc1a9afbfc8427b74e03d7fbe47a6170a0"} Dec 02 10:25:52 crc kubenswrapper[4685]: I1202 10:25:52.744625 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:52 crc kubenswrapper[4685]: I1202 10:25:52.900699 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcgpz\" (UniqueName: \"kubernetes.io/projected/4c22e2d6-7984-4d74-9202-ea57de627392-kube-api-access-gcgpz\") pod \"4c22e2d6-7984-4d74-9202-ea57de627392\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " Dec 02 10:25:52 crc kubenswrapper[4685]: I1202 10:25:52.900856 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-inventory\") pod \"4c22e2d6-7984-4d74-9202-ea57de627392\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " Dec 02 10:25:52 crc kubenswrapper[4685]: I1202 10:25:52.900923 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-repo-setup-combined-ca-bundle\") pod \"4c22e2d6-7984-4d74-9202-ea57de627392\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " Dec 02 10:25:52 crc kubenswrapper[4685]: I1202 10:25:52.900973 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-ssh-key\") pod \"4c22e2d6-7984-4d74-9202-ea57de627392\" (UID: \"4c22e2d6-7984-4d74-9202-ea57de627392\") " Dec 02 10:25:52 crc kubenswrapper[4685]: I1202 10:25:52.907309 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "4c22e2d6-7984-4d74-9202-ea57de627392" (UID: "4c22e2d6-7984-4d74-9202-ea57de627392"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:25:52 crc kubenswrapper[4685]: I1202 10:25:52.907960 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c22e2d6-7984-4d74-9202-ea57de627392-kube-api-access-gcgpz" (OuterVolumeSpecName: "kube-api-access-gcgpz") pod "4c22e2d6-7984-4d74-9202-ea57de627392" (UID: "4c22e2d6-7984-4d74-9202-ea57de627392"). InnerVolumeSpecName "kube-api-access-gcgpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:25:52 crc kubenswrapper[4685]: I1202 10:25:52.931603 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4c22e2d6-7984-4d74-9202-ea57de627392" (UID: "4c22e2d6-7984-4d74-9202-ea57de627392"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:25:52 crc kubenswrapper[4685]: I1202 10:25:52.937441 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-inventory" (OuterVolumeSpecName: "inventory") pod "4c22e2d6-7984-4d74-9202-ea57de627392" (UID: "4c22e2d6-7984-4d74-9202-ea57de627392"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.003542 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcgpz\" (UniqueName: \"kubernetes.io/projected/4c22e2d6-7984-4d74-9202-ea57de627392-kube-api-access-gcgpz\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.003588 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.003598 4685 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.003609 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4c22e2d6-7984-4d74-9202-ea57de627392-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.336644 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" event={"ID":"4c22e2d6-7984-4d74-9202-ea57de627392","Type":"ContainerDied","Data":"0fa8f25a979c00cc95412445b8a1109918a548fce7b91a062f274d75d6615db1"} Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.336698 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fa8f25a979c00cc95412445b8a1109918a548fce7b91a062f274d75d6615db1" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.336771 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.417154 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r"] Dec 02 10:25:53 crc kubenswrapper[4685]: E1202 10:25:53.417523 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerName="registry-server" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.417539 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerName="registry-server" Dec 02 10:25:53 crc kubenswrapper[4685]: E1202 10:25:53.417580 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerName="extract-utilities" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.417590 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerName="extract-utilities" Dec 02 10:25:53 crc kubenswrapper[4685]: E1202 10:25:53.417603 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c22e2d6-7984-4d74-9202-ea57de627392" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.417610 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c22e2d6-7984-4d74-9202-ea57de627392" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 02 10:25:53 crc kubenswrapper[4685]: E1202 10:25:53.417638 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerName="extract-content" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.417643 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerName="extract-content" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.417831 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="500bc619-f381-4f27-8a22-ed512d9fff8d" containerName="registry-server" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.417865 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c22e2d6-7984-4d74-9202-ea57de627392" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.418515 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.420385 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.420981 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.422473 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.434582 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r"] Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.436429 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.513011 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9wxf\" (UniqueName: \"kubernetes.io/projected/d80fcbde-80c5-4c5c-a42d-f95348702600-kube-api-access-v9wxf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-8kv9r\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.513129 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-8kv9r\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.513246 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-8kv9r\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.615512 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9wxf\" (UniqueName: \"kubernetes.io/projected/d80fcbde-80c5-4c5c-a42d-f95348702600-kube-api-access-v9wxf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-8kv9r\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.615599 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-8kv9r\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.615677 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-8kv9r\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.619084 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-8kv9r\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.619346 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-8kv9r\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.636964 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9wxf\" (UniqueName: \"kubernetes.io/projected/d80fcbde-80c5-4c5c-a42d-f95348702600-kube-api-access-v9wxf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-8kv9r\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:53 crc kubenswrapper[4685]: I1202 10:25:53.736930 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:25:54 crc kubenswrapper[4685]: I1202 10:25:54.257616 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r"] Dec 02 10:25:54 crc kubenswrapper[4685]: W1202 10:25:54.266901 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd80fcbde_80c5_4c5c_a42d_f95348702600.slice/crio-ca2f54336204b375bbd0754f286d05b41a209fe213672d2c5cbbf473260dea22 WatchSource:0}: Error finding container ca2f54336204b375bbd0754f286d05b41a209fe213672d2c5cbbf473260dea22: Status 404 returned error can't find the container with id ca2f54336204b375bbd0754f286d05b41a209fe213672d2c5cbbf473260dea22 Dec 02 10:25:54 crc kubenswrapper[4685]: I1202 10:25:54.349489 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" event={"ID":"d80fcbde-80c5-4c5c-a42d-f95348702600","Type":"ContainerStarted","Data":"ca2f54336204b375bbd0754f286d05b41a209fe213672d2c5cbbf473260dea22"} Dec 02 10:25:55 crc kubenswrapper[4685]: I1202 10:25:55.360061 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" event={"ID":"d80fcbde-80c5-4c5c-a42d-f95348702600","Type":"ContainerStarted","Data":"e4d2621f4fd2e43f9ee93a5bc9b362286bc67c029e4a7ab4f9bd237c0b588a66"} Dec 02 10:25:55 crc kubenswrapper[4685]: I1202 10:25:55.388546 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" podStartSLOduration=1.901081299 podStartE2EDuration="2.38852875s" podCreationTimestamp="2025-12-02 10:25:53 +0000 UTC" firstStartedPulling="2025-12-02 10:25:54.270037755 +0000 UTC m=+1446.641811909" lastFinishedPulling="2025-12-02 10:25:54.757485206 +0000 UTC m=+1447.129259360" observedRunningTime="2025-12-02 10:25:55.376080607 +0000 UTC m=+1447.747854761" watchObservedRunningTime="2025-12-02 10:25:55.38852875 +0000 UTC m=+1447.760302894" 
Dec 02 10:25:58 crc kubenswrapper[4685]: I1202 10:25:58.390510 4685 generic.go:334] "Generic (PLEG): container finished" podID="d80fcbde-80c5-4c5c-a42d-f95348702600" containerID="e4d2621f4fd2e43f9ee93a5bc9b362286bc67c029e4a7ab4f9bd237c0b588a66" exitCode=0 Dec 02 10:25:58 crc kubenswrapper[4685]: I1202 10:25:58.390628 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" event={"ID":"d80fcbde-80c5-4c5c-a42d-f95348702600","Type":"ContainerDied","Data":"e4d2621f4fd2e43f9ee93a5bc9b362286bc67c029e4a7ab4f9bd237c0b588a66"} Dec 02 10:26:00 crc kubenswrapper[4685]: I1202 10:26:00.626393 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:26:00 crc kubenswrapper[4685]: I1202 10:26:00.754528 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9wxf\" (UniqueName: \"kubernetes.io/projected/d80fcbde-80c5-4c5c-a42d-f95348702600-kube-api-access-v9wxf\") pod \"d80fcbde-80c5-4c5c-a42d-f95348702600\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " Dec 02 10:26:00 crc kubenswrapper[4685]: I1202 10:26:00.754880 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-ssh-key\") pod \"d80fcbde-80c5-4c5c-a42d-f95348702600\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " Dec 02 10:26:00 crc kubenswrapper[4685]: I1202 10:26:00.755069 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-inventory\") pod \"d80fcbde-80c5-4c5c-a42d-f95348702600\" (UID: \"d80fcbde-80c5-4c5c-a42d-f95348702600\") " Dec 02 10:26:00 crc kubenswrapper[4685]: I1202 10:26:00.760974 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d80fcbde-80c5-4c5c-a42d-f95348702600-kube-api-access-v9wxf" (OuterVolumeSpecName: "kube-api-access-v9wxf") pod "d80fcbde-80c5-4c5c-a42d-f95348702600" (UID: "d80fcbde-80c5-4c5c-a42d-f95348702600"). InnerVolumeSpecName "kube-api-access-v9wxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:26:00 crc kubenswrapper[4685]: I1202 10:26:00.787016 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-inventory" (OuterVolumeSpecName: "inventory") pod "d80fcbde-80c5-4c5c-a42d-f95348702600" (UID: "d80fcbde-80c5-4c5c-a42d-f95348702600"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:26:00 crc kubenswrapper[4685]: I1202 10:26:00.787193 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d80fcbde-80c5-4c5c-a42d-f95348702600" (UID: "d80fcbde-80c5-4c5c-a42d-f95348702600"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:26:00 crc kubenswrapper[4685]: I1202 10:26:00.857954 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:26:00 crc kubenswrapper[4685]: I1202 10:26:00.857986 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d80fcbde-80c5-4c5c-a42d-f95348702600-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:26:00 crc kubenswrapper[4685]: I1202 10:26:00.857997 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9wxf\" (UniqueName: \"kubernetes.io/projected/d80fcbde-80c5-4c5c-a42d-f95348702600-kube-api-access-v9wxf\") on node \"crc\" DevicePath \"\"" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.422898 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" event={"ID":"d80fcbde-80c5-4c5c-a42d-f95348702600","Type":"ContainerDied","Data":"ca2f54336204b375bbd0754f286d05b41a209fe213672d2c5cbbf473260dea22"} Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.422944 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca2f54336204b375bbd0754f286d05b41a209fe213672d2c5cbbf473260dea22" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.422971 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-8kv9r" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.766739 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm"] Dec 02 10:26:01 crc kubenswrapper[4685]: E1202 10:26:01.767362 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d80fcbde-80c5-4c5c-a42d-f95348702600" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.767384 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d80fcbde-80c5-4c5c-a42d-f95348702600" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.767800 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d80fcbde-80c5-4c5c-a42d-f95348702600" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.769258 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.771437 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.773711 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.774608 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.774814 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.774858 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.774932 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.775024 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlfk6\" (UniqueName: \"kubernetes.io/projected/d8a0aadd-4e71-45c1-a810-fae1955f590f-kube-api-access-dlfk6\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.778944 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm"] Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.791693 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.875622 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.875707 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlfk6\" (UniqueName: \"kubernetes.io/projected/d8a0aadd-4e71-45c1-a810-fae1955f590f-kube-api-access-dlfk6\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.875757 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.875783 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.880416 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.881127 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.882701 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.893813 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlfk6\" (UniqueName: \"kubernetes.io/projected/d8a0aadd-4e71-45c1-a810-fae1955f590f-kube-api-access-dlfk6\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.958106 4685 scope.go:117] "RemoveContainer" containerID="d91d88942af04a2dbe44f8ded4030504a1a921f7253df6947bafaba58dac1f0d" Dec 02 10:26:01 crc kubenswrapper[4685]: I1202 10:26:01.985975 4685 scope.go:117] "RemoveContainer" containerID="37c7e57bf2150e4b0d10d79485673c6569b052aacfbc89676ebc71ae67d21d59" Dec 02 10:26:02 crc kubenswrapper[4685]: I1202 10:26:02.016453 4685 scope.go:117] "RemoveContainer" containerID="2c8f1cc614cfe626b938571a3f94e9d411ab779506a85c9914cba88c1da5d0ab" Dec 02 10:26:02 crc kubenswrapper[4685]: I1202 10:26:02.093328 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:26:02 crc kubenswrapper[4685]: I1202 10:26:02.675497 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm"] Dec 02 10:26:03 crc kubenswrapper[4685]: I1202 10:26:03.439713 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" event={"ID":"d8a0aadd-4e71-45c1-a810-fae1955f590f","Type":"ContainerStarted","Data":"d4e67d41ae255b75a706d24e0ee4205071dcd115f502c472bb2426d0ed43ac22"} Dec 02 10:26:04 crc kubenswrapper[4685]: I1202 10:26:04.449039 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" event={"ID":"d8a0aadd-4e71-45c1-a810-fae1955f590f","Type":"ContainerStarted","Data":"2ecd02fbbece4aa6225c202efbd0db8e585871c4764151e96f6e181bb33d2418"} Dec 02 10:26:04 crc kubenswrapper[4685]: I1202 10:26:04.472325 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" podStartSLOduration=2.437004969 podStartE2EDuration="3.472301154s" podCreationTimestamp="2025-12-02 10:26:01 +0000 UTC" firstStartedPulling="2025-12-02 10:26:02.679155373 +0000 UTC m=+1455.050929517" lastFinishedPulling="2025-12-02 10:26:03.714451548 +0000 UTC m=+1456.086225702" observedRunningTime="2025-12-02 10:26:04.463492591 +0000 UTC m=+1456.835266745" watchObservedRunningTime="2025-12-02 10:26:04.472301154 +0000 UTC m=+1456.844075318" Dec 02 10:26:12 crc kubenswrapper[4685]: I1202 10:26:12.147617 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:26:12 crc kubenswrapper[4685]: I1202 10:26:12.148116 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:26:42 crc kubenswrapper[4685]: I1202 10:26:42.147218 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:26:42 crc kubenswrapper[4685]: I1202 10:26:42.147859 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:27:02 crc kubenswrapper[4685]: I1202 10:27:02.161079 4685 scope.go:117] "RemoveContainer" containerID="2d509261c9f62fce41299f0f9b3334a812421998abe7e1d7cb0de9f9bb1e0e16" Dec 02 10:27:02 crc kubenswrapper[4685]: I1202 10:27:02.207641 4685 scope.go:117] "RemoveContainer" containerID="577999921edcc5b692f156a00b9dc635423c1554902c40cbf73e4f5bacdc17cd" Dec 02 10:27:02 crc kubenswrapper[4685]: I1202 10:27:02.252192 4685 scope.go:117] 
"RemoveContainer" containerID="8260809ae593e36bc6df5306fd4d55954f7843f3949584651ae0f038890131cc" Dec 02 10:27:12 crc kubenswrapper[4685]: I1202 10:27:12.147481 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:27:12 crc kubenswrapper[4685]: I1202 10:27:12.148322 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:27:12 crc kubenswrapper[4685]: I1202 10:27:12.148403 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:27:12 crc kubenswrapper[4685]: I1202 10:27:12.150466 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:27:12 crc kubenswrapper[4685]: I1202 10:27:12.150640 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" gracePeriod=600 Dec 02 10:27:12 crc kubenswrapper[4685]: E1202 10:27:12.281928 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:27:13 crc kubenswrapper[4685]: I1202 10:27:13.117696 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" exitCode=0 Dec 02 10:27:13 crc kubenswrapper[4685]: I1202 10:27:13.117757 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd"} Dec 02 10:27:13 crc kubenswrapper[4685]: I1202 10:27:13.118050 4685 scope.go:117] "RemoveContainer" containerID="b848acb1e4ea5bfba36b62a7bd4c00f067f1b500433e3bc3d9d5085e521463fd" Dec 02 10:27:13 crc kubenswrapper[4685]: I1202 10:27:13.118733 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:27:13 crc kubenswrapper[4685]: E1202 10:27:13.119032 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:27:24 crc kubenswrapper[4685]: I1202 10:27:24.900087 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:27:24 crc kubenswrapper[4685]: E1202 10:27:24.900808 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:27:35 crc kubenswrapper[4685]: I1202 10:27:35.900280 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:27:35 crc kubenswrapper[4685]: E1202 10:27:35.903721 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:27:49 crc kubenswrapper[4685]: I1202 10:27:49.900915 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:27:49 crc kubenswrapper[4685]: E1202 10:27:49.901783 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:28:00 crc kubenswrapper[4685]: I1202 10:28:00.899960 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:28:00 crc kubenswrapper[4685]: E1202 10:28:00.900706 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:28:13 crc kubenswrapper[4685]: I1202 10:28:13.900355 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:28:13 crc kubenswrapper[4685]: E1202 10:28:13.901130 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:28:26 crc kubenswrapper[4685]: I1202 10:28:26.900901 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:28:26 crc kubenswrapper[4685]: E1202 10:28:26.904034 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:28:37 crc kubenswrapper[4685]: I1202 10:28:37.905679 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:28:37 crc kubenswrapper[4685]: E1202 10:28:37.906755 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:28:51 crc kubenswrapper[4685]: I1202 10:28:51.900057 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:28:51 crc kubenswrapper[4685]: E1202 10:28:51.900874 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:29:05 crc kubenswrapper[4685]: I1202 10:29:05.899964 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:29:05 crc kubenswrapper[4685]: E1202 10:29:05.900768 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:29:16 crc kubenswrapper[4685]: I1202 10:29:16.899790 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:29:16 crc kubenswrapper[4685]: E1202 10:29:16.900433 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:29:26 crc kubenswrapper[4685]: I1202 10:29:26.442721 4685 
generic.go:334] "Generic (PLEG): container finished" podID="d8a0aadd-4e71-45c1-a810-fae1955f590f" containerID="2ecd02fbbece4aa6225c202efbd0db8e585871c4764151e96f6e181bb33d2418" exitCode=0 Dec 02 10:29:26 crc kubenswrapper[4685]: I1202 10:29:26.442811 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" event={"ID":"d8a0aadd-4e71-45c1-a810-fae1955f590f","Type":"ContainerDied","Data":"2ecd02fbbece4aa6225c202efbd0db8e585871c4764151e96f6e181bb33d2418"} Dec 02 10:29:27 crc kubenswrapper[4685]: I1202 10:29:27.908810 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:29:27 crc kubenswrapper[4685]: E1202 10:29:27.909483 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:29:27 crc kubenswrapper[4685]: I1202 10:29:27.948620 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.029431 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-bootstrap-combined-ca-bundle\") pod \"d8a0aadd-4e71-45c1-a810-fae1955f590f\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.029493 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-inventory\") pod \"d8a0aadd-4e71-45c1-a810-fae1955f590f\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.029568 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-ssh-key\") pod \"d8a0aadd-4e71-45c1-a810-fae1955f590f\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.029615 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlfk6\" (UniqueName: \"kubernetes.io/projected/d8a0aadd-4e71-45c1-a810-fae1955f590f-kube-api-access-dlfk6\") pod \"d8a0aadd-4e71-45c1-a810-fae1955f590f\" (UID: \"d8a0aadd-4e71-45c1-a810-fae1955f590f\") " Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.035161 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "d8a0aadd-4e71-45c1-a810-fae1955f590f" (UID: "d8a0aadd-4e71-45c1-a810-fae1955f590f"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.045681 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8a0aadd-4e71-45c1-a810-fae1955f590f-kube-api-access-dlfk6" (OuterVolumeSpecName: "kube-api-access-dlfk6") pod "d8a0aadd-4e71-45c1-a810-fae1955f590f" (UID: "d8a0aadd-4e71-45c1-a810-fae1955f590f"). InnerVolumeSpecName "kube-api-access-dlfk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.061170 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-inventory" (OuterVolumeSpecName: "inventory") pod "d8a0aadd-4e71-45c1-a810-fae1955f590f" (UID: "d8a0aadd-4e71-45c1-a810-fae1955f590f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.069728 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d8a0aadd-4e71-45c1-a810-fae1955f590f" (UID: "d8a0aadd-4e71-45c1-a810-fae1955f590f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.132803 4685 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.132835 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.132843 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d8a0aadd-4e71-45c1-a810-fae1955f590f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.132853 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlfk6\" (UniqueName: \"kubernetes.io/projected/d8a0aadd-4e71-45c1-a810-fae1955f590f-kube-api-access-dlfk6\") on node \"crc\" DevicePath \"\"" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.462902 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" event={"ID":"d8a0aadd-4e71-45c1-a810-fae1955f590f","Type":"ContainerDied","Data":"d4e67d41ae255b75a706d24e0ee4205071dcd115f502c472bb2426d0ed43ac22"} Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.462945 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4e67d41ae255b75a706d24e0ee4205071dcd115f502c472bb2426d0ed43ac22" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.462982 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.549714 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44"] Dec 02 10:29:28 crc kubenswrapper[4685]: E1202 10:29:28.550159 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8a0aadd-4e71-45c1-a810-fae1955f590f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.550179 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8a0aadd-4e71-45c1-a810-fae1955f590f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.550453 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8a0aadd-4e71-45c1-a810-fae1955f590f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.551610 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.557238 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.557326 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.557544 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.557548 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.588833 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44"] Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.640426 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bpd44\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.640550 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bpd44\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.640941 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c62w6\" (UniqueName: \"kubernetes.io/projected/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-kube-api-access-c62w6\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bpd44\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.742574 4685 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c62w6\" (UniqueName: \"kubernetes.io/projected/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-kube-api-access-c62w6\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bpd44\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.742688 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bpd44\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.742752 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bpd44\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.748278 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bpd44\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.748346 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bpd44\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.759107 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c62w6\" (UniqueName: \"kubernetes.io/projected/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-kube-api-access-c62w6\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bpd44\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:28 crc kubenswrapper[4685]: I1202 10:29:28.883842 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.061825 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7827-account-create-update-5p2bw"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.079465 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-bl2km"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.106628 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-70ae-account-create-update-hrxr2"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.117750 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7827-account-create-update-5p2bw"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.127459 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-bl2km"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.135742 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-70ae-account-create-update-hrxr2"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.143532 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-64lh8"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.151323 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-24e3-account-create-update-qgb5r"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.159158 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-24e3-account-create-update-qgb5r"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.166881 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-64lh8"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.175126 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-brv75"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.182936 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-brv75"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.490258 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44"] Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.499980 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.913182 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3da7f8da-6317-414b-8c60-be3c86223e70" path="/var/lib/kubelet/pods/3da7f8da-6317-414b-8c60-be3c86223e70/volumes" Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.915451 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e3d77d5-8563-42ce-b25a-527cf438fa69" path="/var/lib/kubelet/pods/6e3d77d5-8563-42ce-b25a-527cf438fa69/volumes" Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.917200 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94326915-eb21-493c-a1cb-38dee42578ca" path="/var/lib/kubelet/pods/94326915-eb21-493c-a1cb-38dee42578ca/volumes" Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.918965 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a98f0fc5-1d50-448e-8694-1ce6a21302ad" path="/var/lib/kubelet/pods/a98f0fc5-1d50-448e-8694-1ce6a21302ad/volumes" Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.921375 4685 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf664d50-8650-418f-b0d8-f6c03236e6c9" path="/var/lib/kubelet/pods/bf664d50-8650-418f-b0d8-f6c03236e6c9/volumes" Dec 02 10:29:29 crc kubenswrapper[4685]: I1202 10:29:29.924001 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce382e1f-af3a-4231-95f4-8122d70297ac" path="/var/lib/kubelet/pods/ce382e1f-af3a-4231-95f4-8122d70297ac/volumes" Dec 02 10:29:30 crc kubenswrapper[4685]: I1202 10:29:30.481219 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" event={"ID":"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c","Type":"ContainerStarted","Data":"f843d5887b623c5996ac99d24d4c0940e3c8553028f1d54aa84f2fc3b2228ccb"} Dec 02 10:29:30 crc kubenswrapper[4685]: I1202 10:29:30.481262 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" event={"ID":"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c","Type":"ContainerStarted","Data":"1c00b64cd92b26f1d29d015f1de6817935b76e2f129465ef4362882ebf7d8195"} Dec 02 10:29:30 crc kubenswrapper[4685]: I1202 10:29:30.509125 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" podStartSLOduration=1.9779548089999999 podStartE2EDuration="2.509104794s" podCreationTimestamp="2025-12-02 10:29:28 +0000 UTC" firstStartedPulling="2025-12-02 10:29:29.499621069 +0000 UTC m=+1661.871395243" lastFinishedPulling="2025-12-02 10:29:30.030771044 +0000 UTC m=+1662.402545228" observedRunningTime="2025-12-02 10:29:30.498383899 +0000 UTC m=+1662.870158053" watchObservedRunningTime="2025-12-02 10:29:30.509104794 +0000 UTC m=+1662.880878968" Dec 02 10:29:39 crc kubenswrapper[4685]: I1202 10:29:39.899132 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:29:39 crc kubenswrapper[4685]: E1202 10:29:39.899843 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:29:54 crc kubenswrapper[4685]: I1202 10:29:54.899781 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:29:54 crc kubenswrapper[4685]: E1202 10:29:54.901139 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:29:58 crc kubenswrapper[4685]: I1202 10:29:58.063007 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-lwcqt"] Dec 02 10:29:58 crc kubenswrapper[4685]: I1202 10:29:58.074208 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-lwcqt"] Dec 02 10:29:59 crc kubenswrapper[4685]: I1202 10:29:59.911610 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c" path="/var/lib/kubelet/pods/5d2bb8b8-1101-4788-bb6e-2e80d7a0e84c/volumes" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.147837 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm"] Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.148996 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.152087 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.156189 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.166572 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm"] Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.239338 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psqcg\" (UniqueName: \"kubernetes.io/projected/ea8b2777-8afd-44fb-b110-191728024aae-kube-api-access-psqcg\") pod \"collect-profiles-29411190-t44tm\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.239423 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea8b2777-8afd-44fb-b110-191728024aae-secret-volume\") pod \"collect-profiles-29411190-t44tm\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.239466 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea8b2777-8afd-44fb-b110-191728024aae-config-volume\") pod \"collect-profiles-29411190-t44tm\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.341702 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psqcg\" (UniqueName: \"kubernetes.io/projected/ea8b2777-8afd-44fb-b110-191728024aae-kube-api-access-psqcg\") pod \"collect-profiles-29411190-t44tm\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.341784 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea8b2777-8afd-44fb-b110-191728024aae-secret-volume\") pod \"collect-profiles-29411190-t44tm\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.341821 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/ea8b2777-8afd-44fb-b110-191728024aae-config-volume\") pod \"collect-profiles-29411190-t44tm\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.342638 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea8b2777-8afd-44fb-b110-191728024aae-config-volume\") pod \"collect-profiles-29411190-t44tm\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.353483 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea8b2777-8afd-44fb-b110-191728024aae-secret-volume\") pod \"collect-profiles-29411190-t44tm\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.357236 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psqcg\" (UniqueName: \"kubernetes.io/projected/ea8b2777-8afd-44fb-b110-191728024aae-kube-api-access-psqcg\") pod \"collect-profiles-29411190-t44tm\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.467233 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:00 crc kubenswrapper[4685]: I1202 10:30:00.967919 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm"] Dec 02 10:30:00 crc kubenswrapper[4685]: W1202 10:30:00.979442 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea8b2777_8afd_44fb_b110_191728024aae.slice/crio-453032b26d56d2283d0da423ef5dbe86b516a0b085e2d16bb09895772284886a WatchSource:0}: Error finding container 453032b26d56d2283d0da423ef5dbe86b516a0b085e2d16bb09895772284886a: Status 404 returned error can't find the container with id 453032b26d56d2283d0da423ef5dbe86b516a0b085e2d16bb09895772284886a Dec 02 10:30:01 crc kubenswrapper[4685]: I1202 10:30:01.808582 4685 generic.go:334] "Generic (PLEG): container finished" podID="ea8b2777-8afd-44fb-b110-191728024aae" containerID="7a651c8aef4829787cd80036ac70ae73ef15b486af419f5f31e14e24c72423d4" exitCode=0 Dec 02 10:30:01 crc kubenswrapper[4685]: I1202 10:30:01.808643 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" event={"ID":"ea8b2777-8afd-44fb-b110-191728024aae","Type":"ContainerDied","Data":"7a651c8aef4829787cd80036ac70ae73ef15b486af419f5f31e14e24c72423d4"} Dec 02 10:30:01 crc kubenswrapper[4685]: I1202 10:30:01.808913 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" event={"ID":"ea8b2777-8afd-44fb-b110-191728024aae","Type":"ContainerStarted","Data":"453032b26d56d2283d0da423ef5dbe86b516a0b085e2d16bb09895772284886a"} Dec 02 10:30:02 crc kubenswrapper[4685]: I1202 10:30:02.362984 4685 scope.go:117] "RemoveContainer" 
containerID="c5c2e78d234ea9e2062dc810ec8723c7782c74817ffd92acbe83318d29c3e86e" Dec 02 10:30:02 crc kubenswrapper[4685]: I1202 10:30:02.388608 4685 scope.go:117] "RemoveContainer" containerID="7b3ae08e4e0f9264eac218d2f2c8a6bb473c9af26db10218c6e7669987670d77" Dec 02 10:30:02 crc kubenswrapper[4685]: I1202 10:30:02.462733 4685 scope.go:117] "RemoveContainer" containerID="1c8170a7bf07f7bf644aca68fb270154b24a0876d6f85a7872056351c91363c6" Dec 02 10:30:02 crc kubenswrapper[4685]: I1202 10:30:02.513241 4685 scope.go:117] "RemoveContainer" containerID="00c3596965ba2329d97f84cc30b9051d1b4f5c41ac1ef187899c2ab6be6e9c65" Dec 02 10:30:02 crc kubenswrapper[4685]: I1202 10:30:02.567951 4685 scope.go:117] "RemoveContainer" containerID="882b3af711315c3461e618ab409db4a057393088741a46e2fecad8e14071c331" Dec 02 10:30:02 crc kubenswrapper[4685]: I1202 10:30:02.619036 4685 scope.go:117] "RemoveContainer" containerID="25880f54744e792bc87dda25b37ca667ed2a9b608bd3f6c6c6c6b7926c5a552d" Dec 02 10:30:02 crc kubenswrapper[4685]: I1202 10:30:02.645132 4685 scope.go:117] "RemoveContainer" containerID="f03bb2d5974a9ea93123ffcea6573116bd80c2e7058d5eb8b453114de11137f7" Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.018018 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.094516 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psqcg\" (UniqueName: \"kubernetes.io/projected/ea8b2777-8afd-44fb-b110-191728024aae-kube-api-access-psqcg\") pod \"ea8b2777-8afd-44fb-b110-191728024aae\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.094599 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea8b2777-8afd-44fb-b110-191728024aae-secret-volume\") pod \"ea8b2777-8afd-44fb-b110-191728024aae\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.094674 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea8b2777-8afd-44fb-b110-191728024aae-config-volume\") pod \"ea8b2777-8afd-44fb-b110-191728024aae\" (UID: \"ea8b2777-8afd-44fb-b110-191728024aae\") " Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.095226 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea8b2777-8afd-44fb-b110-191728024aae-config-volume" (OuterVolumeSpecName: "config-volume") pod "ea8b2777-8afd-44fb-b110-191728024aae" (UID: "ea8b2777-8afd-44fb-b110-191728024aae"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.096988 4685 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ea8b2777-8afd-44fb-b110-191728024aae-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.100281 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea8b2777-8afd-44fb-b110-191728024aae-kube-api-access-psqcg" (OuterVolumeSpecName: "kube-api-access-psqcg") pod "ea8b2777-8afd-44fb-b110-191728024aae" (UID: "ea8b2777-8afd-44fb-b110-191728024aae"). InnerVolumeSpecName "kube-api-access-psqcg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.100911 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea8b2777-8afd-44fb-b110-191728024aae-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ea8b2777-8afd-44fb-b110-191728024aae" (UID: "ea8b2777-8afd-44fb-b110-191728024aae"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.199120 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psqcg\" (UniqueName: \"kubernetes.io/projected/ea8b2777-8afd-44fb-b110-191728024aae-kube-api-access-psqcg\") on node \"crc\" DevicePath \"\"" Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.199165 4685 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ea8b2777-8afd-44fb-b110-191728024aae-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.830747 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" event={"ID":"ea8b2777-8afd-44fb-b110-191728024aae","Type":"ContainerDied","Data":"453032b26d56d2283d0da423ef5dbe86b516a0b085e2d16bb09895772284886a"} Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.831162 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="453032b26d56d2283d0da423ef5dbe86b516a0b085e2d16bb09895772284886a" Dec 02 10:30:03 crc kubenswrapper[4685]: I1202 10:30:03.830809 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411190-t44tm" Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.071545 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-1a78-account-create-update-vmvgt"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.088877 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-jlm72"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.102671 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-af9e-account-create-update-7hd8q"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.113709 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8615-account-create-update-tgbbj"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.129422 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-qhx5w"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.138693 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-t4xqf"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.147683 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-af9e-account-create-update-7hd8q"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.157045 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-jlm72"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.165863 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-1a78-account-create-update-vmvgt"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.173035 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8615-account-create-update-tgbbj"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 
10:30:06.180004 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-qhx5w"] Dec 02 10:30:06 crc kubenswrapper[4685]: I1202 10:30:06.187177 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-t4xqf"] Dec 02 10:30:08 crc kubenswrapper[4685]: I1202 10:30:08.994585 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f45f478-3117-4a0f-98df-9e12cfd6e32c" path="/var/lib/kubelet/pods/1f45f478-3117-4a0f-98df-9e12cfd6e32c/volumes" Dec 02 10:30:08 crc kubenswrapper[4685]: I1202 10:30:08.996861 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30d4b64f-19c4-41d2-b11e-93186a45b66f" path="/var/lib/kubelet/pods/30d4b64f-19c4-41d2-b11e-93186a45b66f/volumes" Dec 02 10:30:08 crc kubenswrapper[4685]: I1202 10:30:08.998296 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79bdcad7-d10f-44cd-ad3d-74f2ef5e361d" path="/var/lib/kubelet/pods/79bdcad7-d10f-44cd-ad3d-74f2ef5e361d/volumes" Dec 02 10:30:08 crc kubenswrapper[4685]: I1202 10:30:08.999006 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abf10e29-603f-4ca8-bf1d-9d7dcd0321cf" path="/var/lib/kubelet/pods/abf10e29-603f-4ca8-bf1d-9d7dcd0321cf/volumes" Dec 02 10:30:09 crc kubenswrapper[4685]: I1202 10:30:09.002280 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf" path="/var/lib/kubelet/pods/dbba7edb-57bb-4f58-9bf1-42fc0dd6e3bf/volumes" Dec 02 10:30:09 crc kubenswrapper[4685]: I1202 10:30:09.004886 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed622126-1bc1-49ad-8c9a-be2bfbc94a67" path="/var/lib/kubelet/pods/ed622126-1bc1-49ad-8c9a-be2bfbc94a67/volumes" Dec 02 10:30:09 crc kubenswrapper[4685]: I1202 10:30:09.899715 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:30:09 crc kubenswrapper[4685]: E1202 10:30:09.900499 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:30:10 crc kubenswrapper[4685]: I1202 10:30:10.040804 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-cbz6h"] Dec 02 10:30:10 crc kubenswrapper[4685]: I1202 10:30:10.057055 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-cbz6h"] Dec 02 10:30:11 crc kubenswrapper[4685]: I1202 10:30:11.909993 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c794e1d-41e7-43e2-bd5c-bed20f3ea587" path="/var/lib/kubelet/pods/8c794e1d-41e7-43e2-bd5c-bed20f3ea587/volumes" Dec 02 10:30:20 crc kubenswrapper[4685]: I1202 10:30:20.900316 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:30:20 crc kubenswrapper[4685]: E1202 10:30:20.901097 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:30:33 crc kubenswrapper[4685]: I1202 10:30:33.903539 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:30:33 crc kubenswrapper[4685]: E1202 10:30:33.904234 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:30:45 crc kubenswrapper[4685]: I1202 10:30:45.900091 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:30:45 crc kubenswrapper[4685]: E1202 10:30:45.900885 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:30:49 crc kubenswrapper[4685]: I1202 10:30:49.072502 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-f47mj"] Dec 02 10:30:49 crc kubenswrapper[4685]: I1202 10:30:49.086486 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-f47mj"] Dec 02 10:30:49 crc kubenswrapper[4685]: I1202 10:30:49.912330 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7e4b942-530d-42f2-8ba6-f432991c850d" path="/var/lib/kubelet/pods/a7e4b942-530d-42f2-8ba6-f432991c850d/volumes" Dec 02 10:30:58 crc kubenswrapper[4685]: I1202 10:30:58.899652 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:30:58 crc kubenswrapper[4685]: E1202 10:30:58.900927 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:31:02 crc kubenswrapper[4685]: I1202 10:31:02.780834 4685 scope.go:117] "RemoveContainer" containerID="aafb76207a76a15e7374329db4aad5f9db70949a8544585f98dc1126f4876109" Dec 02 10:31:02 crc kubenswrapper[4685]: I1202 10:31:02.841163 4685 scope.go:117] "RemoveContainer" containerID="31663d0406cddf22bf5aa77408adfccbf8e47aa626ed3c51e1c8a71601f1432b" Dec 02 10:31:02 crc kubenswrapper[4685]: I1202 10:31:02.885061 4685 scope.go:117] "RemoveContainer" containerID="6fa2be5ff627ca8d8a4e37fdbd3590f326ed189f0a88e77d60133375c7163bef" Dec 02 10:31:02 crc kubenswrapper[4685]: I1202 10:31:02.928395 4685 scope.go:117] "RemoveContainer" containerID="861a570cc2c079de5fc578479fd15720d12453e06679d6efe975970bc7ae5b65" Dec 02 10:31:02 crc 
kubenswrapper[4685]: I1202 10:31:02.984733 4685 scope.go:117] "RemoveContainer" containerID="9dda76e504be83fb2bc66894f07a81609bf305bc96fd23526198ca268ed174bb" Dec 02 10:31:03 crc kubenswrapper[4685]: I1202 10:31:03.016781 4685 scope.go:117] "RemoveContainer" containerID="8f7686b6cd630eb3d4b23ca0fcadf88d3742aa5132318a27ddc05d1da0905e03" Dec 02 10:31:03 crc kubenswrapper[4685]: I1202 10:31:03.061534 4685 scope.go:117] "RemoveContainer" containerID="0052ff73f2be1b7fdd02ba5e98e16655fac6b9efb1f1d635e8e331114ba4666b" Dec 02 10:31:03 crc kubenswrapper[4685]: I1202 10:31:03.087996 4685 scope.go:117] "RemoveContainer" containerID="7f1f09b373104069d4a41258d0a09da721cf878a2c264341201509a70b9cfa52" Dec 02 10:31:06 crc kubenswrapper[4685]: I1202 10:31:06.047618 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-v9lzr"] Dec 02 10:31:06 crc kubenswrapper[4685]: I1202 10:31:06.065451 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-v9lzr"] Dec 02 10:31:07 crc kubenswrapper[4685]: I1202 10:31:07.914842 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362" path="/var/lib/kubelet/pods/8ffb3b05-7ecb-4b66-b3ff-dc89a60b1362/volumes" Dec 02 10:31:09 crc kubenswrapper[4685]: I1202 10:31:09.045225 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-mfhwg"] Dec 02 10:31:09 crc kubenswrapper[4685]: I1202 10:31:09.056475 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-9m45k"] Dec 02 10:31:09 crc kubenswrapper[4685]: I1202 10:31:09.065880 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-mfhwg"] Dec 02 10:31:09 crc kubenswrapper[4685]: I1202 10:31:09.089253 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-9m45k"] Dec 02 10:31:09 crc kubenswrapper[4685]: I1202 10:31:09.928697 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83601b22-374e-4d44-85a3-eed233d3ff11" path="/var/lib/kubelet/pods/83601b22-374e-4d44-85a3-eed233d3ff11/volumes" Dec 02 10:31:09 crc kubenswrapper[4685]: I1202 10:31:09.929590 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcc4c853-7816-4912-9c98-55c29ae90396" path="/var/lib/kubelet/pods/fcc4c853-7816-4912-9c98-55c29ae90396/volumes" Dec 02 10:31:12 crc kubenswrapper[4685]: I1202 10:31:12.899921 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:31:12 crc kubenswrapper[4685]: E1202 10:31:12.900587 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:31:26 crc kubenswrapper[4685]: I1202 10:31:26.061002 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-xx8rz"] Dec 02 10:31:26 crc kubenswrapper[4685]: I1202 10:31:26.076700 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-xx8rz"] Dec 02 10:31:26 crc kubenswrapper[4685]: I1202 10:31:26.899990 4685 scope.go:117] "RemoveContainer" 
containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:31:26 crc kubenswrapper[4685]: E1202 10:31:26.900735 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:31:27 crc kubenswrapper[4685]: I1202 10:31:27.914254 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b558b4d-8398-435d-8925-e36325681252" path="/var/lib/kubelet/pods/5b558b4d-8398-435d-8925-e36325681252/volumes" Dec 02 10:31:31 crc kubenswrapper[4685]: I1202 10:31:31.051533 4685 generic.go:334] "Generic (PLEG): container finished" podID="1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c" containerID="f843d5887b623c5996ac99d24d4c0940e3c8553028f1d54aa84f2fc3b2228ccb" exitCode=0 Dec 02 10:31:31 crc kubenswrapper[4685]: I1202 10:31:31.051648 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" event={"ID":"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c","Type":"ContainerDied","Data":"f843d5887b623c5996ac99d24d4c0940e3c8553028f1d54aa84f2fc3b2228ccb"} Dec 02 10:31:32 crc kubenswrapper[4685]: I1202 10:31:32.517608 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:31:32 crc kubenswrapper[4685]: I1202 10:31:32.668926 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c62w6\" (UniqueName: \"kubernetes.io/projected/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-kube-api-access-c62w6\") pod \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " Dec 02 10:31:32 crc kubenswrapper[4685]: I1202 10:31:32.668994 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-inventory\") pod \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " Dec 02 10:31:32 crc kubenswrapper[4685]: I1202 10:31:32.669106 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-ssh-key\") pod \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\" (UID: \"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c\") " Dec 02 10:31:32 crc kubenswrapper[4685]: I1202 10:31:32.681496 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-kube-api-access-c62w6" (OuterVolumeSpecName: "kube-api-access-c62w6") pod "1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c" (UID: "1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c"). InnerVolumeSpecName "kube-api-access-c62w6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:31:32 crc kubenswrapper[4685]: I1202 10:31:32.696613 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-inventory" (OuterVolumeSpecName: "inventory") pod "1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c" (UID: "1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:31:32 crc kubenswrapper[4685]: I1202 10:31:32.701869 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c" (UID: "1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:31:32 crc kubenswrapper[4685]: I1202 10:31:32.771874 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:31:32 crc kubenswrapper[4685]: I1202 10:31:32.771941 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c62w6\" (UniqueName: \"kubernetes.io/projected/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-kube-api-access-c62w6\") on node \"crc\" DevicePath \"\"" Dec 02 10:31:32 crc kubenswrapper[4685]: I1202 10:31:32.771957 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.074315 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" event={"ID":"1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c","Type":"ContainerDied","Data":"1c00b64cd92b26f1d29d015f1de6817935b76e2f129465ef4362882ebf7d8195"} Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.074616 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c00b64cd92b26f1d29d015f1de6817935b76e2f129465ef4362882ebf7d8195" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.074414 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bpd44" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.177265 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5"] Dec 02 10:31:33 crc kubenswrapper[4685]: E1202 10:31:33.177758 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea8b2777-8afd-44fb-b110-191728024aae" containerName="collect-profiles" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.177778 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea8b2777-8afd-44fb-b110-191728024aae" containerName="collect-profiles" Dec 02 10:31:33 crc kubenswrapper[4685]: E1202 10:31:33.177796 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.177805 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.178036 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea8b2777-8afd-44fb-b110-191728024aae" containerName="collect-profiles" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.178060 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.178821 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.181940 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-899q5\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.181977 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn8jh\" (UniqueName: \"kubernetes.io/projected/fb248306-2b41-458e-9127-987af525ae12-kube-api-access-tn8jh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-899q5\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.182090 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-899q5\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.184089 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.184274 4685 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.184988 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.185421 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.205471 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5"] Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.283827 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-899q5\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.283952 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-899q5\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.283976 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn8jh\" (UniqueName: \"kubernetes.io/projected/fb248306-2b41-458e-9127-987af525ae12-kube-api-access-tn8jh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-899q5\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.287909 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-899q5\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.293198 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-899q5\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.304077 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn8jh\" (UniqueName: \"kubernetes.io/projected/fb248306-2b41-458e-9127-987af525ae12-kube-api-access-tn8jh\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-899q5\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.503762 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:31:33 crc kubenswrapper[4685]: I1202 10:31:33.880732 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5"] Dec 02 10:31:34 crc kubenswrapper[4685]: I1202 10:31:34.084122 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" event={"ID":"fb248306-2b41-458e-9127-987af525ae12","Type":"ContainerStarted","Data":"6846f77405dc464ad2ced933223999c8a4e494b76d87b61fb9778309ccc61af4"} Dec 02 10:31:35 crc kubenswrapper[4685]: I1202 10:31:35.096726 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" event={"ID":"fb248306-2b41-458e-9127-987af525ae12","Type":"ContainerStarted","Data":"ff9f6e41fd17f6243b7565782cda15976ae5f8372dd913cf186cf046bbc773b6"} Dec 02 10:31:41 crc kubenswrapper[4685]: I1202 10:31:41.899893 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:31:41 crc kubenswrapper[4685]: E1202 10:31:41.900694 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:31:52 crc kubenswrapper[4685]: I1202 10:31:52.900923 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:31:52 crc kubenswrapper[4685]: E1202 10:31:52.901818 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.076724 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" podStartSLOduration=29.392678983 podStartE2EDuration="30.076697848s" podCreationTimestamp="2025-12-02 10:31:33 +0000 UTC" firstStartedPulling="2025-12-02 10:31:33.891254732 +0000 UTC m=+1786.263028886" lastFinishedPulling="2025-12-02 10:31:34.575273587 +0000 UTC m=+1786.947047751" observedRunningTime="2025-12-02 10:31:35.125614289 +0000 UTC m=+1787.497388453" watchObservedRunningTime="2025-12-02 10:32:03.076697848 +0000 UTC m=+1815.448472012" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.083946 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-cvh77"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.097655 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-a3e9-account-create-update-8vfbk"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.104417 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-45ee-account-create-update-xc7h6"] Dec 02 10:32:03 crc kubenswrapper[4685]: 
I1202 10:32:03.111050 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-6zx4q"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.118440 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-4q5pt"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.125979 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-a3e9-account-create-update-8vfbk"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.132804 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-45ee-account-create-update-xc7h6"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.139360 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-cvh77"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.145624 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-787b-account-create-update-bztf9"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.152243 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-4q5pt"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.158354 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-6zx4q"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.169156 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-787b-account-create-update-bztf9"] Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.256834 4685 scope.go:117] "RemoveContainer" containerID="250cda1f46a6099f638e7b8f0a8261f8d473b2e75c320a2969a4426fd7a695cf" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.319959 4685 scope.go:117] "RemoveContainer" containerID="7515c3e25a47861ece92cfebe06534215c8bd020b4182bba15c432640bc597e7" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.357177 4685 scope.go:117] "RemoveContainer" containerID="ee91c84bd6126d401419888b4d3ddd01067c50bb40b385ac1c8c9b640048313b" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.399403 4685 scope.go:117] "RemoveContainer" containerID="ed21cd4795c0f484c817670e9329fabd31dff1365d9260d1fdacf9f928618d6d" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.919250 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02632b3a-ff2c-4d71-9bf9-ed781da5b3fa" path="/var/lib/kubelet/pods/02632b3a-ff2c-4d71-9bf9-ed781da5b3fa/volumes" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.919890 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b6eac8f-13f6-4a05-87fb-1ee6c96186ac" path="/var/lib/kubelet/pods/0b6eac8f-13f6-4a05-87fb-1ee6c96186ac/volumes" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.920446 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1106914c-d68c-4f72-b83e-58ad61aea61f" path="/var/lib/kubelet/pods/1106914c-d68c-4f72-b83e-58ad61aea61f/volumes" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.921044 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="550e98fe-cb75-49d0-9711-ac6241981523" path="/var/lib/kubelet/pods/550e98fe-cb75-49d0-9711-ac6241981523/volumes" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.922027 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad0963bd-ed5f-4a2e-b723-0824ad646bdf" path="/var/lib/kubelet/pods/ad0963bd-ed5f-4a2e-b723-0824ad646bdf/volumes" Dec 02 10:32:03 crc kubenswrapper[4685]: I1202 10:32:03.922588 4685 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="e0ce999c-4e4b-413e-8e21-08b9c6e40b3f" path="/var/lib/kubelet/pods/e0ce999c-4e4b-413e-8e21-08b9c6e40b3f/volumes" Dec 02 10:32:05 crc kubenswrapper[4685]: I1202 10:32:05.902272 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:32:05 crc kubenswrapper[4685]: E1202 10:32:05.902859 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:32:19 crc kubenswrapper[4685]: I1202 10:32:19.900045 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:32:20 crc kubenswrapper[4685]: I1202 10:32:20.575553 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"d9a0450853ed82258a1cfaa327b75cdfa803fd37df35161147c84e1dbdb1aba0"} Dec 02 10:32:41 crc kubenswrapper[4685]: I1202 10:32:41.078645 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hddhn"] Dec 02 10:32:41 crc kubenswrapper[4685]: I1202 10:32:41.094816 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hddhn"] Dec 02 10:32:41 crc kubenswrapper[4685]: I1202 10:32:41.916394 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="469cbb32-2f63-4f4b-813d-e07e778e0eaf" path="/var/lib/kubelet/pods/469cbb32-2f63-4f4b-813d-e07e778e0eaf/volumes" Dec 02 10:32:56 crc kubenswrapper[4685]: I1202 10:32:56.971622 4685 generic.go:334] "Generic (PLEG): container finished" podID="fb248306-2b41-458e-9127-987af525ae12" containerID="ff9f6e41fd17f6243b7565782cda15976ae5f8372dd913cf186cf046bbc773b6" exitCode=0 Dec 02 10:32:56 crc kubenswrapper[4685]: I1202 10:32:56.971839 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" event={"ID":"fb248306-2b41-458e-9127-987af525ae12","Type":"ContainerDied","Data":"ff9f6e41fd17f6243b7565782cda15976ae5f8372dd913cf186cf046bbc773b6"} Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.444673 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.585246 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-ssh-key\") pod \"fb248306-2b41-458e-9127-987af525ae12\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.585452 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn8jh\" (UniqueName: \"kubernetes.io/projected/fb248306-2b41-458e-9127-987af525ae12-kube-api-access-tn8jh\") pod \"fb248306-2b41-458e-9127-987af525ae12\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.585563 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-inventory\") pod \"fb248306-2b41-458e-9127-987af525ae12\" (UID: \"fb248306-2b41-458e-9127-987af525ae12\") " Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.592736 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb248306-2b41-458e-9127-987af525ae12-kube-api-access-tn8jh" (OuterVolumeSpecName: "kube-api-access-tn8jh") pod "fb248306-2b41-458e-9127-987af525ae12" (UID: "fb248306-2b41-458e-9127-987af525ae12"). InnerVolumeSpecName "kube-api-access-tn8jh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.613709 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fb248306-2b41-458e-9127-987af525ae12" (UID: "fb248306-2b41-458e-9127-987af525ae12"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.615110 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-inventory" (OuterVolumeSpecName: "inventory") pod "fb248306-2b41-458e-9127-987af525ae12" (UID: "fb248306-2b41-458e-9127-987af525ae12"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.688075 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.688114 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn8jh\" (UniqueName: \"kubernetes.io/projected/fb248306-2b41-458e-9127-987af525ae12-kube-api-access-tn8jh\") on node \"crc\" DevicePath \"\"" Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.688124 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb248306-2b41-458e-9127-987af525ae12-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.997130 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" event={"ID":"fb248306-2b41-458e-9127-987af525ae12","Type":"ContainerDied","Data":"6846f77405dc464ad2ced933223999c8a4e494b76d87b61fb9778309ccc61af4"} Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.997171 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6846f77405dc464ad2ced933223999c8a4e494b76d87b61fb9778309ccc61af4" Dec 02 10:32:58 crc kubenswrapper[4685]: I1202 10:32:58.997177 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-899q5" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.099766 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn"] Dec 02 10:32:59 crc kubenswrapper[4685]: E1202 10:32:59.100199 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb248306-2b41-458e-9127-987af525ae12" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.100215 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb248306-2b41-458e-9127-987af525ae12" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.100406 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb248306-2b41-458e-9127-987af525ae12" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.101478 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.108076 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.108260 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.108428 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.108627 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.115201 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn"] Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.197535 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.197734 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prmf9\" (UniqueName: \"kubernetes.io/projected/13eea0ea-5642-4780-9aaa-dd0148f05809-kube-api-access-prmf9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.197843 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.299048 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.299225 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prmf9\" (UniqueName: \"kubernetes.io/projected/13eea0ea-5642-4780-9aaa-dd0148f05809-kube-api-access-prmf9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.299316 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.304141 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.307002 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.327185 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prmf9\" (UniqueName: \"kubernetes.io/projected/13eea0ea-5642-4780-9aaa-dd0148f05809-kube-api-access-prmf9\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.426399 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:32:59 crc kubenswrapper[4685]: I1202 10:32:59.950947 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn"] Dec 02 10:33:00 crc kubenswrapper[4685]: I1202 10:33:00.006663 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" event={"ID":"13eea0ea-5642-4780-9aaa-dd0148f05809","Type":"ContainerStarted","Data":"25ced951d2499735ccf5c0d3c5065a9cd74b9542dcc02297f4af76ba1345fc41"} Dec 02 10:33:01 crc kubenswrapper[4685]: I1202 10:33:01.017506 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" event={"ID":"13eea0ea-5642-4780-9aaa-dd0148f05809","Type":"ContainerStarted","Data":"dcff2d70cd6836fd30107193668dbd75c80d78bb8863e810e3a382f56a3c644c"} Dec 02 10:33:01 crc kubenswrapper[4685]: I1202 10:33:01.035371 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" podStartSLOduration=1.561043603 podStartE2EDuration="2.03533198s" podCreationTimestamp="2025-12-02 10:32:59 +0000 UTC" firstStartedPulling="2025-12-02 10:32:59.953124664 +0000 UTC m=+1872.324898818" lastFinishedPulling="2025-12-02 10:33:00.427413001 +0000 UTC m=+1872.799187195" observedRunningTime="2025-12-02 10:33:01.034436076 +0000 UTC m=+1873.406210230" watchObservedRunningTime="2025-12-02 10:33:01.03533198 +0000 UTC m=+1873.407106134" Dec 02 10:33:03 crc kubenswrapper[4685]: I1202 10:33:03.550097 4685 scope.go:117] "RemoveContainer" containerID="bae334d8fbd2a40380dc77bcc18aa4add24982d8636558670cf71c574364bd2f" Dec 02 10:33:03 crc kubenswrapper[4685]: I1202 10:33:03.592657 4685 scope.go:117] "RemoveContainer" 
containerID="0c892ffdda94fb32a7f9334a30c03497dc3478d74e06b4768ecf0c4746d1c221" Dec 02 10:33:03 crc kubenswrapper[4685]: I1202 10:33:03.643540 4685 scope.go:117] "RemoveContainer" containerID="c5949b2135f1a4ff717be8568ada6c183dd79f7b288d5904b17b0246ddff2a79" Dec 02 10:33:03 crc kubenswrapper[4685]: I1202 10:33:03.703963 4685 scope.go:117] "RemoveContainer" containerID="f6a1da09c0ebfd0bb6c124f02821e7fb092be18e45cc4d285f027d51cbe8b5d3" Dec 02 10:33:03 crc kubenswrapper[4685]: I1202 10:33:03.729918 4685 scope.go:117] "RemoveContainer" containerID="dd3afc3a284773632c10bbc49a3968bef6d20cd77e5ad3ed446da46c46ed1da1" Dec 02 10:33:03 crc kubenswrapper[4685]: I1202 10:33:03.774264 4685 scope.go:117] "RemoveContainer" containerID="3081afbcf59f10192be7b1b139db5e2d5593ecdf0d5aae40434641dc1fd6da1e" Dec 02 10:33:03 crc kubenswrapper[4685]: I1202 10:33:03.808940 4685 scope.go:117] "RemoveContainer" containerID="64bba70de56ea63066b4554ef45801fc33b8fa1f156b984c4a945749fb4cfef7" Dec 02 10:33:06 crc kubenswrapper[4685]: I1202 10:33:06.087739 4685 generic.go:334] "Generic (PLEG): container finished" podID="13eea0ea-5642-4780-9aaa-dd0148f05809" containerID="dcff2d70cd6836fd30107193668dbd75c80d78bb8863e810e3a382f56a3c644c" exitCode=0 Dec 02 10:33:06 crc kubenswrapper[4685]: I1202 10:33:06.087809 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" event={"ID":"13eea0ea-5642-4780-9aaa-dd0148f05809","Type":"ContainerDied","Data":"dcff2d70cd6836fd30107193668dbd75c80d78bb8863e810e3a382f56a3c644c"} Dec 02 10:33:07 crc kubenswrapper[4685]: I1202 10:33:07.740001 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:33:07 crc kubenswrapper[4685]: I1202 10:33:07.879991 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prmf9\" (UniqueName: \"kubernetes.io/projected/13eea0ea-5642-4780-9aaa-dd0148f05809-kube-api-access-prmf9\") pod \"13eea0ea-5642-4780-9aaa-dd0148f05809\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " Dec 02 10:33:07 crc kubenswrapper[4685]: I1202 10:33:07.880107 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-inventory\") pod \"13eea0ea-5642-4780-9aaa-dd0148f05809\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " Dec 02 10:33:07 crc kubenswrapper[4685]: I1202 10:33:07.880127 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-ssh-key\") pod \"13eea0ea-5642-4780-9aaa-dd0148f05809\" (UID: \"13eea0ea-5642-4780-9aaa-dd0148f05809\") " Dec 02 10:33:07 crc kubenswrapper[4685]: I1202 10:33:07.886259 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13eea0ea-5642-4780-9aaa-dd0148f05809-kube-api-access-prmf9" (OuterVolumeSpecName: "kube-api-access-prmf9") pod "13eea0ea-5642-4780-9aaa-dd0148f05809" (UID: "13eea0ea-5642-4780-9aaa-dd0148f05809"). InnerVolumeSpecName "kube-api-access-prmf9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:33:07 crc kubenswrapper[4685]: I1202 10:33:07.917734 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-inventory" (OuterVolumeSpecName: "inventory") pod "13eea0ea-5642-4780-9aaa-dd0148f05809" (UID: "13eea0ea-5642-4780-9aaa-dd0148f05809"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:33:07 crc kubenswrapper[4685]: I1202 10:33:07.923886 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "13eea0ea-5642-4780-9aaa-dd0148f05809" (UID: "13eea0ea-5642-4780-9aaa-dd0148f05809"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:33:07 crc kubenswrapper[4685]: I1202 10:33:07.983381 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prmf9\" (UniqueName: \"kubernetes.io/projected/13eea0ea-5642-4780-9aaa-dd0148f05809-kube-api-access-prmf9\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:07 crc kubenswrapper[4685]: I1202 10:33:07.983437 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:07 crc kubenswrapper[4685]: I1202 10:33:07.983449 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/13eea0ea-5642-4780-9aaa-dd0148f05809-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.112531 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" event={"ID":"13eea0ea-5642-4780-9aaa-dd0148f05809","Type":"ContainerDied","Data":"25ced951d2499735ccf5c0d3c5065a9cd74b9542dcc02297f4af76ba1345fc41"} Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.112640 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25ced951d2499735ccf5c0d3c5065a9cd74b9542dcc02297f4af76ba1345fc41" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.112715 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.237459 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq"] Dec 02 10:33:08 crc kubenswrapper[4685]: E1202 10:33:08.237881 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13eea0ea-5642-4780-9aaa-dd0148f05809" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.237901 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="13eea0ea-5642-4780-9aaa-dd0148f05809" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.238088 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="13eea0ea-5642-4780-9aaa-dd0148f05809" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.238800 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.241538 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.241720 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.244762 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.246168 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.251698 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq"] Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.390349 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t7jvq\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.390433 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndgtt\" (UniqueName: \"kubernetes.io/projected/e08815ae-1633-46ac-85ce-3aa867348763-kube-api-access-ndgtt\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t7jvq\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.390607 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t7jvq\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.492619 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t7jvq\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.492737 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t7jvq\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.492800 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndgtt\" (UniqueName: \"kubernetes.io/projected/e08815ae-1633-46ac-85ce-3aa867348763-kube-api-access-ndgtt\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t7jvq\" (UID: 
\"e08815ae-1633-46ac-85ce-3aa867348763\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.496474 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t7jvq\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.498455 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t7jvq\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.510881 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndgtt\" (UniqueName: \"kubernetes.io/projected/e08815ae-1633-46ac-85ce-3aa867348763-kube-api-access-ndgtt\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-t7jvq\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:08 crc kubenswrapper[4685]: I1202 10:33:08.557991 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:33:09 crc kubenswrapper[4685]: I1202 10:33:09.064497 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-f5gg9"] Dec 02 10:33:09 crc kubenswrapper[4685]: I1202 10:33:09.074420 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-f5gg9"] Dec 02 10:33:09 crc kubenswrapper[4685]: I1202 10:33:09.161744 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq"] Dec 02 10:33:09 crc kubenswrapper[4685]: I1202 10:33:09.915760 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42b0ad2c-7829-421a-b059-d71a19bc9c8e" path="/var/lib/kubelet/pods/42b0ad2c-7829-421a-b059-d71a19bc9c8e/volumes" Dec 02 10:33:10 crc kubenswrapper[4685]: I1202 10:33:10.135400 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" event={"ID":"e08815ae-1633-46ac-85ce-3aa867348763","Type":"ContainerStarted","Data":"15e7ad2abb3314338d4b6614c94e5269feb4c4331f4466f45692a3c0c94a6b0f"} Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.042031 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4lsm9"] Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.060915 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4lsm9"] Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.647751 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k7kjg"] Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.650889 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.661146 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7kjg"] Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.779520 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q4f4\" (UniqueName: \"kubernetes.io/projected/e8118446-4c8a-453e-af34-ffb30869726d-kube-api-access-8q4f4\") pod \"redhat-marketplace-k7kjg\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.779590 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-utilities\") pod \"redhat-marketplace-k7kjg\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.779643 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-catalog-content\") pod \"redhat-marketplace-k7kjg\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.882184 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-catalog-content\") pod \"redhat-marketplace-k7kjg\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.882455 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q4f4\" (UniqueName: \"kubernetes.io/projected/e8118446-4c8a-453e-af34-ffb30869726d-kube-api-access-8q4f4\") pod \"redhat-marketplace-k7kjg\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.882496 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-utilities\") pod \"redhat-marketplace-k7kjg\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.882857 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-catalog-content\") pod \"redhat-marketplace-k7kjg\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.883104 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-utilities\") pod \"redhat-marketplace-k7kjg\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.908731 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8q4f4\" (UniqueName: \"kubernetes.io/projected/e8118446-4c8a-453e-af34-ffb30869726d-kube-api-access-8q4f4\") pod \"redhat-marketplace-k7kjg\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:12 crc kubenswrapper[4685]: I1202 10:33:12.976045 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.237738 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-v42x8"] Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.246933 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.257883 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v42x8"] Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.395810 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9gpg\" (UniqueName: \"kubernetes.io/projected/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-kube-api-access-f9gpg\") pod \"certified-operators-v42x8\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.395888 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-catalog-content\") pod \"certified-operators-v42x8\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.395950 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-utilities\") pod \"certified-operators-v42x8\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: W1202 10:33:13.438927 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8118446_4c8a_453e_af34_ffb30869726d.slice/crio-9dbfd8e738113ed60060417105f2ac3231abad66c1602a25ac7096a541e21b67 WatchSource:0}: Error finding container 9dbfd8e738113ed60060417105f2ac3231abad66c1602a25ac7096a541e21b67: Status 404 returned error can't find the container with id 9dbfd8e738113ed60060417105f2ac3231abad66c1602a25ac7096a541e21b67 Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.441912 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7kjg"] Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.497272 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9gpg\" (UniqueName: \"kubernetes.io/projected/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-kube-api-access-f9gpg\") pod \"certified-operators-v42x8\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.497329 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-catalog-content\") pod \"certified-operators-v42x8\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.497393 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-utilities\") pod \"certified-operators-v42x8\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.497944 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-utilities\") pod \"certified-operators-v42x8\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.498075 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-catalog-content\") pod \"certified-operators-v42x8\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.521842 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9gpg\" (UniqueName: \"kubernetes.io/projected/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-kube-api-access-f9gpg\") pod \"certified-operators-v42x8\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.571047 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:13 crc kubenswrapper[4685]: I1202 10:33:13.921711 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bb811fb-5566-4c40-a281-0ea15a5360eb" path="/var/lib/kubelet/pods/6bb811fb-5566-4c40-a281-0ea15a5360eb/volumes" Dec 02 10:33:14 crc kubenswrapper[4685]: I1202 10:33:14.002530 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v42x8"] Dec 02 10:33:14 crc kubenswrapper[4685]: I1202 10:33:14.182240 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v42x8" event={"ID":"67cb7fa9-fed8-472d-8b27-ca043bc7e21a","Type":"ContainerStarted","Data":"ef96726cafcb9a921a5ea673df4336c9e4aa911d13fc3f0ee09ffebfdaf9d6b9"} Dec 02 10:33:14 crc kubenswrapper[4685]: I1202 10:33:14.185856 4685 generic.go:334] "Generic (PLEG): container finished" podID="e8118446-4c8a-453e-af34-ffb30869726d" containerID="fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d" exitCode=0 Dec 02 10:33:14 crc kubenswrapper[4685]: I1202 10:33:14.185890 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7kjg" event={"ID":"e8118446-4c8a-453e-af34-ffb30869726d","Type":"ContainerDied","Data":"fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d"} Dec 02 10:33:14 crc kubenswrapper[4685]: I1202 10:33:14.185909 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7kjg" event={"ID":"e8118446-4c8a-453e-af34-ffb30869726d","Type":"ContainerStarted","Data":"9dbfd8e738113ed60060417105f2ac3231abad66c1602a25ac7096a541e21b67"} Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.033609 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rgqtf"] Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.036245 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.047199 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rgqtf"] Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.131933 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-catalog-content\") pod \"redhat-operators-rgqtf\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.132063 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5tb2\" (UniqueName: \"kubernetes.io/projected/70ebc112-6634-474c-b331-310767b9e4f5-kube-api-access-d5tb2\") pod \"redhat-operators-rgqtf\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.132124 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-utilities\") pod \"redhat-operators-rgqtf\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.193506 4685 generic.go:334] "Generic (PLEG): container finished" podID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerID="91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2" exitCode=0 Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.193547 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v42x8" event={"ID":"67cb7fa9-fed8-472d-8b27-ca043bc7e21a","Type":"ContainerDied","Data":"91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2"} Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.234163 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5tb2\" (UniqueName: \"kubernetes.io/projected/70ebc112-6634-474c-b331-310767b9e4f5-kube-api-access-d5tb2\") pod \"redhat-operators-rgqtf\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.234456 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-utilities\") pod \"redhat-operators-rgqtf\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.234689 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-catalog-content\") pod \"redhat-operators-rgqtf\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.235128 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-utilities\") pod \"redhat-operators-rgqtf\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " 
pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.235296 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-catalog-content\") pod \"redhat-operators-rgqtf\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.254502 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5tb2\" (UniqueName: \"kubernetes.io/projected/70ebc112-6634-474c-b331-310767b9e4f5-kube-api-access-d5tb2\") pod \"redhat-operators-rgqtf\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.358263 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:15 crc kubenswrapper[4685]: I1202 10:33:15.816942 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rgqtf"] Dec 02 10:33:15 crc kubenswrapper[4685]: W1202 10:33:15.825025 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70ebc112_6634_474c_b331_310767b9e4f5.slice/crio-bc3cef9dd612de1877e4f1e170b963a5d6da460ba72ee8d46b82c5fa192d8791 WatchSource:0}: Error finding container bc3cef9dd612de1877e4f1e170b963a5d6da460ba72ee8d46b82c5fa192d8791: Status 404 returned error can't find the container with id bc3cef9dd612de1877e4f1e170b963a5d6da460ba72ee8d46b82c5fa192d8791 Dec 02 10:33:16 crc kubenswrapper[4685]: I1202 10:33:16.207948 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgqtf" event={"ID":"70ebc112-6634-474c-b331-310767b9e4f5","Type":"ContainerStarted","Data":"bc3cef9dd612de1877e4f1e170b963a5d6da460ba72ee8d46b82c5fa192d8791"} Dec 02 10:33:16 crc kubenswrapper[4685]: I1202 10:33:16.221204 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7kjg" event={"ID":"e8118446-4c8a-453e-af34-ffb30869726d","Type":"ContainerStarted","Data":"130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4"} Dec 02 10:33:17 crc kubenswrapper[4685]: I1202 10:33:17.247922 4685 generic.go:334] "Generic (PLEG): container finished" podID="70ebc112-6634-474c-b331-310767b9e4f5" containerID="4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1" exitCode=0 Dec 02 10:33:17 crc kubenswrapper[4685]: I1202 10:33:17.247987 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgqtf" event={"ID":"70ebc112-6634-474c-b331-310767b9e4f5","Type":"ContainerDied","Data":"4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1"} Dec 02 10:33:17 crc kubenswrapper[4685]: I1202 10:33:17.251260 4685 generic.go:334] "Generic (PLEG): container finished" podID="e8118446-4c8a-453e-af34-ffb30869726d" containerID="130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4" exitCode=0 Dec 02 10:33:17 crc kubenswrapper[4685]: I1202 10:33:17.251291 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7kjg" event={"ID":"e8118446-4c8a-453e-af34-ffb30869726d","Type":"ContainerDied","Data":"130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4"} Dec 02 10:33:18 crc 
kubenswrapper[4685]: I1202 10:33:18.264203 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v42x8" event={"ID":"67cb7fa9-fed8-472d-8b27-ca043bc7e21a","Type":"ContainerStarted","Data":"6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858"} Dec 02 10:33:19 crc kubenswrapper[4685]: I1202 10:33:19.277113 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgqtf" event={"ID":"70ebc112-6634-474c-b331-310767b9e4f5","Type":"ContainerStarted","Data":"1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df"} Dec 02 10:33:19 crc kubenswrapper[4685]: I1202 10:33:19.325330 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7kjg" event={"ID":"e8118446-4c8a-453e-af34-ffb30869726d","Type":"ContainerStarted","Data":"4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a"} Dec 02 10:33:19 crc kubenswrapper[4685]: I1202 10:33:19.373633 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k7kjg" podStartSLOduration=3.556549359 podStartE2EDuration="7.373611362s" podCreationTimestamp="2025-12-02 10:33:12 +0000 UTC" firstStartedPulling="2025-12-02 10:33:14.187995125 +0000 UTC m=+1886.559769279" lastFinishedPulling="2025-12-02 10:33:18.005057128 +0000 UTC m=+1890.376831282" observedRunningTime="2025-12-02 10:33:19.353785628 +0000 UTC m=+1891.725559782" watchObservedRunningTime="2025-12-02 10:33:19.373611362 +0000 UTC m=+1891.745385526" Dec 02 10:33:20 crc kubenswrapper[4685]: I1202 10:33:20.341450 4685 generic.go:334] "Generic (PLEG): container finished" podID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerID="6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858" exitCode=0 Dec 02 10:33:20 crc kubenswrapper[4685]: I1202 10:33:20.341495 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v42x8" event={"ID":"67cb7fa9-fed8-472d-8b27-ca043bc7e21a","Type":"ContainerDied","Data":"6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858"} Dec 02 10:33:22 crc kubenswrapper[4685]: I1202 10:33:22.976664 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:22 crc kubenswrapper[4685]: I1202 10:33:22.977034 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:24 crc kubenswrapper[4685]: I1202 10:33:24.221283 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-k7kjg" podUID="e8118446-4c8a-453e-af34-ffb30869726d" containerName="registry-server" probeResult="failure" output=< Dec 02 10:33:24 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Dec 02 10:33:24 crc kubenswrapper[4685]: > Dec 02 10:33:25 crc kubenswrapper[4685]: I1202 10:33:25.405814 4685 generic.go:334] "Generic (PLEG): container finished" podID="70ebc112-6634-474c-b331-310767b9e4f5" containerID="1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df" exitCode=0 Dec 02 10:33:25 crc kubenswrapper[4685]: I1202 10:33:25.405848 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgqtf" event={"ID":"70ebc112-6634-474c-b331-310767b9e4f5","Type":"ContainerDied","Data":"1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df"} Dec 02 10:33:30 crc 
kubenswrapper[4685]: I1202 10:33:30.480659 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v42x8" event={"ID":"67cb7fa9-fed8-472d-8b27-ca043bc7e21a","Type":"ContainerStarted","Data":"799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f"} Dec 02 10:33:30 crc kubenswrapper[4685]: I1202 10:33:30.484700 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" event={"ID":"e08815ae-1633-46ac-85ce-3aa867348763","Type":"ContainerStarted","Data":"7a06f52212f14d3440d66653862345beadcf35725fde914e1f826e3d4a6ce2cc"} Dec 02 10:33:30 crc kubenswrapper[4685]: I1202 10:33:30.510178 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-v42x8" podStartSLOduration=2.552347295 podStartE2EDuration="17.510160406s" podCreationTimestamp="2025-12-02 10:33:13 +0000 UTC" firstStartedPulling="2025-12-02 10:33:15.196906034 +0000 UTC m=+1887.568680188" lastFinishedPulling="2025-12-02 10:33:30.154719145 +0000 UTC m=+1902.526493299" observedRunningTime="2025-12-02 10:33:30.507326058 +0000 UTC m=+1902.879100212" watchObservedRunningTime="2025-12-02 10:33:30.510160406 +0000 UTC m=+1902.881934560" Dec 02 10:33:30 crc kubenswrapper[4685]: I1202 10:33:30.526282 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" podStartSLOduration=1.875193408 podStartE2EDuration="22.526264598s" podCreationTimestamp="2025-12-02 10:33:08 +0000 UTC" firstStartedPulling="2025-12-02 10:33:09.157814889 +0000 UTC m=+1881.529589063" lastFinishedPulling="2025-12-02 10:33:29.808886099 +0000 UTC m=+1902.180660253" observedRunningTime="2025-12-02 10:33:30.519891892 +0000 UTC m=+1902.891666046" watchObservedRunningTime="2025-12-02 10:33:30.526264598 +0000 UTC m=+1902.898038752" Dec 02 10:33:31 crc kubenswrapper[4685]: I1202 10:33:31.496385 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgqtf" event={"ID":"70ebc112-6634-474c-b331-310767b9e4f5","Type":"ContainerStarted","Data":"92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746"} Dec 02 10:33:31 crc kubenswrapper[4685]: I1202 10:33:31.517207 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rgqtf" podStartSLOduration=3.339909114 podStartE2EDuration="16.517192132s" podCreationTimestamp="2025-12-02 10:33:15 +0000 UTC" firstStartedPulling="2025-12-02 10:33:17.249481262 +0000 UTC m=+1889.621255416" lastFinishedPulling="2025-12-02 10:33:30.42676428 +0000 UTC m=+1902.798538434" observedRunningTime="2025-12-02 10:33:31.515046764 +0000 UTC m=+1903.886820928" watchObservedRunningTime="2025-12-02 10:33:31.517192132 +0000 UTC m=+1903.888966296" Dec 02 10:33:33 crc kubenswrapper[4685]: I1202 10:33:33.037104 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:33 crc kubenswrapper[4685]: I1202 10:33:33.090108 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:33 crc kubenswrapper[4685]: I1202 10:33:33.274461 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7kjg"] Dec 02 10:33:33 crc kubenswrapper[4685]: I1202 10:33:33.571668 4685 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:33 crc kubenswrapper[4685]: I1202 10:33:33.571720 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:34 crc kubenswrapper[4685]: I1202 10:33:34.521474 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k7kjg" podUID="e8118446-4c8a-453e-af34-ffb30869726d" containerName="registry-server" containerID="cri-o://4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a" gracePeriod=2 Dec 02 10:33:34 crc kubenswrapper[4685]: I1202 10:33:34.617606 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-v42x8" podUID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerName="registry-server" probeResult="failure" output=< Dec 02 10:33:34 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Dec 02 10:33:34 crc kubenswrapper[4685]: > Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.194773 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.251711 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-catalog-content\") pod \"e8118446-4c8a-453e-af34-ffb30869726d\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.251925 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8q4f4\" (UniqueName: \"kubernetes.io/projected/e8118446-4c8a-453e-af34-ffb30869726d-kube-api-access-8q4f4\") pod \"e8118446-4c8a-453e-af34-ffb30869726d\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.252008 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-utilities\") pod \"e8118446-4c8a-453e-af34-ffb30869726d\" (UID: \"e8118446-4c8a-453e-af34-ffb30869726d\") " Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.252542 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-utilities" (OuterVolumeSpecName: "utilities") pod "e8118446-4c8a-453e-af34-ffb30869726d" (UID: "e8118446-4c8a-453e-af34-ffb30869726d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.257656 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8118446-4c8a-453e-af34-ffb30869726d-kube-api-access-8q4f4" (OuterVolumeSpecName: "kube-api-access-8q4f4") pod "e8118446-4c8a-453e-af34-ffb30869726d" (UID: "e8118446-4c8a-453e-af34-ffb30869726d"). InnerVolumeSpecName "kube-api-access-8q4f4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.269765 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e8118446-4c8a-453e-af34-ffb30869726d" (UID: "e8118446-4c8a-453e-af34-ffb30869726d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.354550 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.354603 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8q4f4\" (UniqueName: \"kubernetes.io/projected/e8118446-4c8a-453e-af34-ffb30869726d-kube-api-access-8q4f4\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.354613 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8118446-4c8a-453e-af34-ffb30869726d-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.358853 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.358892 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.532003 4685 generic.go:334] "Generic (PLEG): container finished" podID="e8118446-4c8a-453e-af34-ffb30869726d" containerID="4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a" exitCode=0 Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.532050 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7kjg" event={"ID":"e8118446-4c8a-453e-af34-ffb30869726d","Type":"ContainerDied","Data":"4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a"} Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.532083 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7kjg" event={"ID":"e8118446-4c8a-453e-af34-ffb30869726d","Type":"ContainerDied","Data":"9dbfd8e738113ed60060417105f2ac3231abad66c1602a25ac7096a541e21b67"} Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.532091 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7kjg" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.532105 4685 scope.go:117] "RemoveContainer" containerID="4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.556275 4685 scope.go:117] "RemoveContainer" containerID="130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.590103 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7kjg"] Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.591287 4685 scope.go:117] "RemoveContainer" containerID="fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.604739 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7kjg"] Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.628491 4685 scope.go:117] "RemoveContainer" containerID="4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a" Dec 02 10:33:35 crc kubenswrapper[4685]: E1202 10:33:35.632178 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a\": container with ID starting with 4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a not found: ID does not exist" containerID="4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.632213 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a"} err="failed to get container status \"4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a\": rpc error: code = NotFound desc = could not find container \"4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a\": container with ID starting with 4198c3b8c7a9541c62502f86aa91e0c4fca843d0a98630387b86c8d04835052a not found: ID does not exist" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.632240 4685 scope.go:117] "RemoveContainer" containerID="130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4" Dec 02 10:33:35 crc kubenswrapper[4685]: E1202 10:33:35.632455 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4\": container with ID starting with 130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4 not found: ID does not exist" containerID="130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.632482 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4"} err="failed to get container status \"130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4\": rpc error: code = NotFound desc = could not find container \"130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4\": container with ID starting with 130e95bd4555c2402a84ff7f1f6df0fe45ddb6316b1a38282998a5d00253b9a4 not found: ID does not exist" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.632496 4685 scope.go:117] "RemoveContainer" 
containerID="fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d" Dec 02 10:33:35 crc kubenswrapper[4685]: E1202 10:33:35.632713 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d\": container with ID starting with fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d not found: ID does not exist" containerID="fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.632737 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d"} err="failed to get container status \"fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d\": rpc error: code = NotFound desc = could not find container \"fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d\": container with ID starting with fc8c2b2756922b4814da310d5bd50155d507d3376fc2bb6397f98238d9acb20d not found: ID does not exist" Dec 02 10:33:35 crc kubenswrapper[4685]: I1202 10:33:35.911900 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8118446-4c8a-453e-af34-ffb30869726d" path="/var/lib/kubelet/pods/e8118446-4c8a-453e-af34-ffb30869726d/volumes" Dec 02 10:33:36 crc kubenswrapper[4685]: I1202 10:33:36.410134 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rgqtf" podUID="70ebc112-6634-474c-b331-310767b9e4f5" containerName="registry-server" probeResult="failure" output=< Dec 02 10:33:36 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Dec 02 10:33:36 crc kubenswrapper[4685]: > Dec 02 10:33:43 crc kubenswrapper[4685]: I1202 10:33:43.638962 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:43 crc kubenswrapper[4685]: I1202 10:33:43.695225 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:44 crc kubenswrapper[4685]: I1202 10:33:44.835090 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v42x8"] Dec 02 10:33:45 crc kubenswrapper[4685]: I1202 10:33:45.413025 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:45 crc kubenswrapper[4685]: I1202 10:33:45.467312 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:45 crc kubenswrapper[4685]: I1202 10:33:45.636862 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-v42x8" podUID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerName="registry-server" containerID="cri-o://799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f" gracePeriod=2 Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.073242 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.163423 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9gpg\" (UniqueName: \"kubernetes.io/projected/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-kube-api-access-f9gpg\") pod \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.163660 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-catalog-content\") pod \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.163749 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-utilities\") pod \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\" (UID: \"67cb7fa9-fed8-472d-8b27-ca043bc7e21a\") " Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.164358 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-utilities" (OuterVolumeSpecName: "utilities") pod "67cb7fa9-fed8-472d-8b27-ca043bc7e21a" (UID: "67cb7fa9-fed8-472d-8b27-ca043bc7e21a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.170301 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-kube-api-access-f9gpg" (OuterVolumeSpecName: "kube-api-access-f9gpg") pod "67cb7fa9-fed8-472d-8b27-ca043bc7e21a" (UID: "67cb7fa9-fed8-472d-8b27-ca043bc7e21a"). InnerVolumeSpecName "kube-api-access-f9gpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.207647 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "67cb7fa9-fed8-472d-8b27-ca043bc7e21a" (UID: "67cb7fa9-fed8-472d-8b27-ca043bc7e21a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.267094 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.267168 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.267183 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9gpg\" (UniqueName: \"kubernetes.io/projected/67cb7fa9-fed8-472d-8b27-ca043bc7e21a-kube-api-access-f9gpg\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.648154 4685 generic.go:334] "Generic (PLEG): container finished" podID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerID="799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f" exitCode=0 Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.648380 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v42x8" event={"ID":"67cb7fa9-fed8-472d-8b27-ca043bc7e21a","Type":"ContainerDied","Data":"799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f"} Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.648518 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v42x8" event={"ID":"67cb7fa9-fed8-472d-8b27-ca043bc7e21a","Type":"ContainerDied","Data":"ef96726cafcb9a921a5ea673df4336c9e4aa911d13fc3f0ee09ffebfdaf9d6b9"} Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.648548 4685 scope.go:117] "RemoveContainer" containerID="799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.648481 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v42x8" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.689298 4685 scope.go:117] "RemoveContainer" containerID="6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.699486 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v42x8"] Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.707587 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-v42x8"] Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.726401 4685 scope.go:117] "RemoveContainer" containerID="91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.764696 4685 scope.go:117] "RemoveContainer" containerID="799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f" Dec 02 10:33:46 crc kubenswrapper[4685]: E1202 10:33:46.765675 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f\": container with ID starting with 799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f not found: ID does not exist" containerID="799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.765736 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f"} err="failed to get container status \"799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f\": rpc error: code = NotFound desc = could not find container \"799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f\": container with ID starting with 799c8d321f934184e4da6e87d2dfa2fab15f7d8683c7437fb4dba5d01744093f not found: ID does not exist" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.765769 4685 scope.go:117] "RemoveContainer" containerID="6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858" Dec 02 10:33:46 crc kubenswrapper[4685]: E1202 10:33:46.767530 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858\": container with ID starting with 6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858 not found: ID does not exist" containerID="6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.767579 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858"} err="failed to get container status \"6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858\": rpc error: code = NotFound desc = could not find container \"6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858\": container with ID starting with 6d3fa8fd82be0d4930456196dc93d0506d77211640174fa53d0956e1e9abb858 not found: ID does not exist" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.767596 4685 scope.go:117] "RemoveContainer" containerID="91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2" Dec 02 10:33:46 crc kubenswrapper[4685]: E1202 10:33:46.767828 4685 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2\": container with ID starting with 91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2 not found: ID does not exist" containerID="91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2" Dec 02 10:33:46 crc kubenswrapper[4685]: I1202 10:33:46.767853 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2"} err="failed to get container status \"91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2\": rpc error: code = NotFound desc = could not find container \"91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2\": container with ID starting with 91f141980e40d0aabe956e9cb87e42d0fa806151c0b275d7d8951df15ef684c2 not found: ID does not exist" Dec 02 10:33:47 crc kubenswrapper[4685]: I1202 10:33:47.836422 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rgqtf"] Dec 02 10:33:47 crc kubenswrapper[4685]: I1202 10:33:47.836857 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rgqtf" podUID="70ebc112-6634-474c-b331-310767b9e4f5" containerName="registry-server" containerID="cri-o://92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746" gracePeriod=2 Dec 02 10:33:47 crc kubenswrapper[4685]: I1202 10:33:47.914048 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" path="/var/lib/kubelet/pods/67cb7fa9-fed8-472d-8b27-ca043bc7e21a/volumes" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.294130 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.414697 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-catalog-content\") pod \"70ebc112-6634-474c-b331-310767b9e4f5\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.414835 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5tb2\" (UniqueName: \"kubernetes.io/projected/70ebc112-6634-474c-b331-310767b9e4f5-kube-api-access-d5tb2\") pod \"70ebc112-6634-474c-b331-310767b9e4f5\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.415198 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-utilities\") pod \"70ebc112-6634-474c-b331-310767b9e4f5\" (UID: \"70ebc112-6634-474c-b331-310767b9e4f5\") " Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.416340 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-utilities" (OuterVolumeSpecName: "utilities") pod "70ebc112-6634-474c-b331-310767b9e4f5" (UID: "70ebc112-6634-474c-b331-310767b9e4f5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.428293 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70ebc112-6634-474c-b331-310767b9e4f5-kube-api-access-d5tb2" (OuterVolumeSpecName: "kube-api-access-d5tb2") pod "70ebc112-6634-474c-b331-310767b9e4f5" (UID: "70ebc112-6634-474c-b331-310767b9e4f5"). InnerVolumeSpecName "kube-api-access-d5tb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.517819 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.518390 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5tb2\" (UniqueName: \"kubernetes.io/projected/70ebc112-6634-474c-b331-310767b9e4f5-kube-api-access-d5tb2\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.531708 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "70ebc112-6634-474c-b331-310767b9e4f5" (UID: "70ebc112-6634-474c-b331-310767b9e4f5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.620204 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70ebc112-6634-474c-b331-310767b9e4f5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.671593 4685 generic.go:334] "Generic (PLEG): container finished" podID="70ebc112-6634-474c-b331-310767b9e4f5" containerID="92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746" exitCode=0 Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.671644 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgqtf" event={"ID":"70ebc112-6634-474c-b331-310767b9e4f5","Type":"ContainerDied","Data":"92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746"} Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.671664 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rgqtf" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.671693 4685 scope.go:117] "RemoveContainer" containerID="92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.671678 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgqtf" event={"ID":"70ebc112-6634-474c-b331-310767b9e4f5","Type":"ContainerDied","Data":"bc3cef9dd612de1877e4f1e170b963a5d6da460ba72ee8d46b82c5fa192d8791"} Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.691781 4685 scope.go:117] "RemoveContainer" containerID="1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.705232 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rgqtf"] Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.716232 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rgqtf"] Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.727202 4685 scope.go:117] "RemoveContainer" containerID="4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.756075 4685 scope.go:117] "RemoveContainer" containerID="92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746" Dec 02 10:33:48 crc kubenswrapper[4685]: E1202 10:33:48.756684 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746\": container with ID starting with 92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746 not found: ID does not exist" containerID="92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.756739 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746"} err="failed to get container status \"92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746\": rpc error: code = NotFound desc = could not find container \"92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746\": container with ID starting with 92fb877f9308a85ab543fca11ce63ab1cd2b3039c319c343c673af3fc686e746 not found: ID does not exist" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.756785 4685 scope.go:117] "RemoveContainer" containerID="1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df" Dec 02 10:33:48 crc kubenswrapper[4685]: E1202 10:33:48.757242 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df\": container with ID starting with 1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df not found: ID does not exist" containerID="1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.757277 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df"} err="failed to get container status \"1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df\": rpc error: code = NotFound desc = could not find container 
\"1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df\": container with ID starting with 1f707041156126352463a63732b610116e819116809aacbcaf7a9eed4a92f8df not found: ID does not exist" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.757301 4685 scope.go:117] "RemoveContainer" containerID="4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1" Dec 02 10:33:48 crc kubenswrapper[4685]: E1202 10:33:48.757739 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1\": container with ID starting with 4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1 not found: ID does not exist" containerID="4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1" Dec 02 10:33:48 crc kubenswrapper[4685]: I1202 10:33:48.757877 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1"} err="failed to get container status \"4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1\": rpc error: code = NotFound desc = could not find container \"4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1\": container with ID starting with 4a3f684991ce87ae658a8c8169c0fd43b8feea8497e5e6fa18e5dbd21942dea1 not found: ID does not exist" Dec 02 10:33:49 crc kubenswrapper[4685]: I1202 10:33:49.912967 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70ebc112-6634-474c-b331-310767b9e4f5" path="/var/lib/kubelet/pods/70ebc112-6634-474c-b331-310767b9e4f5/volumes" Dec 02 10:33:53 crc kubenswrapper[4685]: I1202 10:33:53.058229 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-6z76h"] Dec 02 10:33:53 crc kubenswrapper[4685]: I1202 10:33:53.067314 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-6z76h"] Dec 02 10:33:53 crc kubenswrapper[4685]: I1202 10:33:53.912510 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2517c1e7-aba5-4015-a491-34eee9e1bebb" path="/var/lib/kubelet/pods/2517c1e7-aba5-4015-a491-34eee9e1bebb/volumes" Dec 02 10:34:03 crc kubenswrapper[4685]: I1202 10:34:03.960919 4685 scope.go:117] "RemoveContainer" containerID="cf8fd53219a24490a374951ef74c8bc7e33a0dac2e5726fac117ce27f551922e" Dec 02 10:34:04 crc kubenswrapper[4685]: I1202 10:34:04.013750 4685 scope.go:117] "RemoveContainer" containerID="0bf04bc3cada1cc2799f41365167ff3c538bf369911c4da160e6810380e1072e" Dec 02 10:34:04 crc kubenswrapper[4685]: I1202 10:34:04.106622 4685 scope.go:117] "RemoveContainer" containerID="9b97f339662b266e6cf426522c09d6ba665ea65e5a724f555ef211022a9d0fa2" Dec 02 10:34:10 crc kubenswrapper[4685]: I1202 10:34:10.872843 4685 generic.go:334] "Generic (PLEG): container finished" podID="e08815ae-1633-46ac-85ce-3aa867348763" containerID="7a06f52212f14d3440d66653862345beadcf35725fde914e1f826e3d4a6ce2cc" exitCode=0 Dec 02 10:34:10 crc kubenswrapper[4685]: I1202 10:34:10.872976 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" event={"ID":"e08815ae-1633-46ac-85ce-3aa867348763","Type":"ContainerDied","Data":"7a06f52212f14d3440d66653862345beadcf35725fde914e1f826e3d4a6ce2cc"} Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.391189 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.519435 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-inventory\") pod \"e08815ae-1633-46ac-85ce-3aa867348763\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.519517 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-ssh-key\") pod \"e08815ae-1633-46ac-85ce-3aa867348763\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.519631 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndgtt\" (UniqueName: \"kubernetes.io/projected/e08815ae-1633-46ac-85ce-3aa867348763-kube-api-access-ndgtt\") pod \"e08815ae-1633-46ac-85ce-3aa867348763\" (UID: \"e08815ae-1633-46ac-85ce-3aa867348763\") " Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.525358 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e08815ae-1633-46ac-85ce-3aa867348763-kube-api-access-ndgtt" (OuterVolumeSpecName: "kube-api-access-ndgtt") pod "e08815ae-1633-46ac-85ce-3aa867348763" (UID: "e08815ae-1633-46ac-85ce-3aa867348763"). InnerVolumeSpecName "kube-api-access-ndgtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.543194 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e08815ae-1633-46ac-85ce-3aa867348763" (UID: "e08815ae-1633-46ac-85ce-3aa867348763"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.544292 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-inventory" (OuterVolumeSpecName: "inventory") pod "e08815ae-1633-46ac-85ce-3aa867348763" (UID: "e08815ae-1633-46ac-85ce-3aa867348763"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.623005 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndgtt\" (UniqueName: \"kubernetes.io/projected/e08815ae-1633-46ac-85ce-3aa867348763-kube-api-access-ndgtt\") on node \"crc\" DevicePath \"\"" Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.623031 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.623041 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e08815ae-1633-46ac-85ce-3aa867348763-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.896878 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" event={"ID":"e08815ae-1633-46ac-85ce-3aa867348763","Type":"ContainerDied","Data":"15e7ad2abb3314338d4b6614c94e5269feb4c4331f4466f45692a3c0c94a6b0f"} Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.897261 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15e7ad2abb3314338d4b6614c94e5269feb4c4331f4466f45692a3c0c94a6b0f" Dec 02 10:34:12 crc kubenswrapper[4685]: I1202 10:34:12.896947 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-t7jvq" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.025149 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6"] Dec 02 10:34:13 crc kubenswrapper[4685]: E1202 10:34:13.025743 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70ebc112-6634-474c-b331-310767b9e4f5" containerName="extract-content" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.025764 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="70ebc112-6634-474c-b331-310767b9e4f5" containerName="extract-content" Dec 02 10:34:13 crc kubenswrapper[4685]: E1202 10:34:13.025785 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70ebc112-6634-474c-b331-310767b9e4f5" containerName="registry-server" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.025793 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="70ebc112-6634-474c-b331-310767b9e4f5" containerName="registry-server" Dec 02 10:34:13 crc kubenswrapper[4685]: E1202 10:34:13.025812 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerName="extract-content" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.025821 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerName="extract-content" Dec 02 10:34:13 crc kubenswrapper[4685]: E1202 10:34:13.025835 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e08815ae-1633-46ac-85ce-3aa867348763" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.025845 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e08815ae-1633-46ac-85ce-3aa867348763" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:34:13 crc kubenswrapper[4685]: E1202 10:34:13.025864 4685 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerName="extract-utilities" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.025873 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerName="extract-utilities" Dec 02 10:34:13 crc kubenswrapper[4685]: E1202 10:34:13.025898 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerName="registry-server" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.025906 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerName="registry-server" Dec 02 10:34:13 crc kubenswrapper[4685]: E1202 10:34:13.025925 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8118446-4c8a-453e-af34-ffb30869726d" containerName="extract-utilities" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.025937 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8118446-4c8a-453e-af34-ffb30869726d" containerName="extract-utilities" Dec 02 10:34:13 crc kubenswrapper[4685]: E1202 10:34:13.025953 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8118446-4c8a-453e-af34-ffb30869726d" containerName="extract-content" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.025961 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8118446-4c8a-453e-af34-ffb30869726d" containerName="extract-content" Dec 02 10:34:13 crc kubenswrapper[4685]: E1202 10:34:13.025984 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8118446-4c8a-453e-af34-ffb30869726d" containerName="registry-server" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.025993 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8118446-4c8a-453e-af34-ffb30869726d" containerName="registry-server" Dec 02 10:34:13 crc kubenswrapper[4685]: E1202 10:34:13.026009 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70ebc112-6634-474c-b331-310767b9e4f5" containerName="extract-utilities" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.026017 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="70ebc112-6634-474c-b331-310767b9e4f5" containerName="extract-utilities" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.026255 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="67cb7fa9-fed8-472d-8b27-ca043bc7e21a" containerName="registry-server" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.026275 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="70ebc112-6634-474c-b331-310767b9e4f5" containerName="registry-server" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.026297 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e08815ae-1633-46ac-85ce-3aa867348763" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.026319 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8118446-4c8a-453e-af34-ffb30869726d" containerName="registry-server" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.027175 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.030906 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.030988 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.031220 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.035167 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.039651 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6"] Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.135440 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdn5j\" (UniqueName: \"kubernetes.io/projected/459c09ff-049f-4edf-b41c-dc203f6527dc-kube-api-access-cdn5j\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.135512 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.135782 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.237747 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdn5j\" (UniqueName: \"kubernetes.io/projected/459c09ff-049f-4edf-b41c-dc203f6527dc-kube-api-access-cdn5j\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.238256 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.238432 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6\" 
(UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.242656 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.243665 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.257954 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdn5j\" (UniqueName: \"kubernetes.io/projected/459c09ff-049f-4edf-b41c-dc203f6527dc-kube-api-access-cdn5j\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.348130 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:34:13 crc kubenswrapper[4685]: I1202 10:34:13.940584 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6"] Dec 02 10:34:13 crc kubenswrapper[4685]: W1202 10:34:13.945294 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod459c09ff_049f_4edf_b41c_dc203f6527dc.slice/crio-71930a137c6b4641a05f6f850b5939f6bcb5411373aa02091b1b359ddcf3e0dc WatchSource:0}: Error finding container 71930a137c6b4641a05f6f850b5939f6bcb5411373aa02091b1b359ddcf3e0dc: Status 404 returned error can't find the container with id 71930a137c6b4641a05f6f850b5939f6bcb5411373aa02091b1b359ddcf3e0dc Dec 02 10:34:14 crc kubenswrapper[4685]: I1202 10:34:14.923471 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" event={"ID":"459c09ff-049f-4edf-b41c-dc203f6527dc","Type":"ContainerStarted","Data":"a1e752caf1a63d5e2bec0c73afcedcdc2bfce0f80f4af89c04977d1e710fcee7"} Dec 02 10:34:14 crc kubenswrapper[4685]: I1202 10:34:14.923789 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" event={"ID":"459c09ff-049f-4edf-b41c-dc203f6527dc","Type":"ContainerStarted","Data":"71930a137c6b4641a05f6f850b5939f6bcb5411373aa02091b1b359ddcf3e0dc"} Dec 02 10:34:14 crc kubenswrapper[4685]: I1202 10:34:14.952553 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" podStartSLOduration=2.311827829 podStartE2EDuration="2.95252883s" podCreationTimestamp="2025-12-02 10:34:12 +0000 UTC" firstStartedPulling="2025-12-02 10:34:13.947700613 +0000 UTC m=+1946.319474767" lastFinishedPulling="2025-12-02 10:34:14.588401604 +0000 UTC m=+1946.960175768" observedRunningTime="2025-12-02 
10:34:14.942151216 +0000 UTC m=+1947.313925390" watchObservedRunningTime="2025-12-02 10:34:14.95252883 +0000 UTC m=+1947.324303004" Dec 02 10:34:42 crc kubenswrapper[4685]: I1202 10:34:42.147704 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:34:42 crc kubenswrapper[4685]: I1202 10:34:42.148335 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:35:12 crc kubenswrapper[4685]: I1202 10:35:12.147769 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:35:12 crc kubenswrapper[4685]: I1202 10:35:12.148310 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:35:17 crc kubenswrapper[4685]: I1202 10:35:17.544554 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" event={"ID":"459c09ff-049f-4edf-b41c-dc203f6527dc","Type":"ContainerDied","Data":"a1e752caf1a63d5e2bec0c73afcedcdc2bfce0f80f4af89c04977d1e710fcee7"} Dec 02 10:35:17 crc kubenswrapper[4685]: I1202 10:35:17.544510 4685 generic.go:334] "Generic (PLEG): container finished" podID="459c09ff-049f-4edf-b41c-dc203f6527dc" containerID="a1e752caf1a63d5e2bec0c73afcedcdc2bfce0f80f4af89c04977d1e710fcee7" exitCode=0 Dec 02 10:35:18 crc kubenswrapper[4685]: I1202 10:35:18.993375 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.051546 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-ssh-key\") pod \"459c09ff-049f-4edf-b41c-dc203f6527dc\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.052074 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdn5j\" (UniqueName: \"kubernetes.io/projected/459c09ff-049f-4edf-b41c-dc203f6527dc-kube-api-access-cdn5j\") pod \"459c09ff-049f-4edf-b41c-dc203f6527dc\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.052104 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-inventory\") pod \"459c09ff-049f-4edf-b41c-dc203f6527dc\" (UID: \"459c09ff-049f-4edf-b41c-dc203f6527dc\") " Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.059956 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/459c09ff-049f-4edf-b41c-dc203f6527dc-kube-api-access-cdn5j" (OuterVolumeSpecName: "kube-api-access-cdn5j") pod "459c09ff-049f-4edf-b41c-dc203f6527dc" (UID: "459c09ff-049f-4edf-b41c-dc203f6527dc"). InnerVolumeSpecName "kube-api-access-cdn5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.083669 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "459c09ff-049f-4edf-b41c-dc203f6527dc" (UID: "459c09ff-049f-4edf-b41c-dc203f6527dc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.091289 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-inventory" (OuterVolumeSpecName: "inventory") pod "459c09ff-049f-4edf-b41c-dc203f6527dc" (UID: "459c09ff-049f-4edf-b41c-dc203f6527dc"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.155084 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdn5j\" (UniqueName: \"kubernetes.io/projected/459c09ff-049f-4edf-b41c-dc203f6527dc-kube-api-access-cdn5j\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.155131 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.155144 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/459c09ff-049f-4edf-b41c-dc203f6527dc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.572049 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" event={"ID":"459c09ff-049f-4edf-b41c-dc203f6527dc","Type":"ContainerDied","Data":"71930a137c6b4641a05f6f850b5939f6bcb5411373aa02091b1b359ddcf3e0dc"} Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.572381 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71930a137c6b4641a05f6f850b5939f6bcb5411373aa02091b1b359ddcf3e0dc" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.572146 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.704354 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-2md5s"] Dec 02 10:35:19 crc kubenswrapper[4685]: E1202 10:35:19.705078 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="459c09ff-049f-4edf-b41c-dc203f6527dc" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.705170 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="459c09ff-049f-4edf-b41c-dc203f6527dc" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.705507 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="459c09ff-049f-4edf-b41c-dc203f6527dc" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.706301 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.711029 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.711365 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.712862 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.713632 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.723620 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-2md5s"] Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.767749 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j6dc\" (UniqueName: \"kubernetes.io/projected/d47b11b2-65de-45ac-9395-81ccca27d279-kube-api-access-5j6dc\") pod \"ssh-known-hosts-edpm-deployment-2md5s\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.767854 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-2md5s\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.767982 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-2md5s\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.869311 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j6dc\" (UniqueName: \"kubernetes.io/projected/d47b11b2-65de-45ac-9395-81ccca27d279-kube-api-access-5j6dc\") pod \"ssh-known-hosts-edpm-deployment-2md5s\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.869855 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-2md5s\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.870111 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-2md5s\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:19 crc 
kubenswrapper[4685]: I1202 10:35:19.873969 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-2md5s\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.879318 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-2md5s\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:19 crc kubenswrapper[4685]: I1202 10:35:19.885646 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j6dc\" (UniqueName: \"kubernetes.io/projected/d47b11b2-65de-45ac-9395-81ccca27d279-kube-api-access-5j6dc\") pod \"ssh-known-hosts-edpm-deployment-2md5s\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:20 crc kubenswrapper[4685]: I1202 10:35:20.026204 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:20 crc kubenswrapper[4685]: I1202 10:35:20.578455 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-2md5s"] Dec 02 10:35:20 crc kubenswrapper[4685]: I1202 10:35:20.592024 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 10:35:21 crc kubenswrapper[4685]: I1202 10:35:21.594828 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" event={"ID":"d47b11b2-65de-45ac-9395-81ccca27d279","Type":"ContainerStarted","Data":"7cc6a9f496da9215154c175e172a4bb14d06ce72f37a17d93b46fb9797354b9a"} Dec 02 10:35:21 crc kubenswrapper[4685]: I1202 10:35:21.595079 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" event={"ID":"d47b11b2-65de-45ac-9395-81ccca27d279","Type":"ContainerStarted","Data":"2cd85d7f50c9bb3c192eb4b61bca6f1dac8293cf392bae71a80882e2c2f3dc20"} Dec 02 10:35:21 crc kubenswrapper[4685]: I1202 10:35:21.618595 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" podStartSLOduration=2.086994013 podStartE2EDuration="2.618546005s" podCreationTimestamp="2025-12-02 10:35:19 +0000 UTC" firstStartedPulling="2025-12-02 10:35:20.59154384 +0000 UTC m=+2012.963318024" lastFinishedPulling="2025-12-02 10:35:21.123095852 +0000 UTC m=+2013.494870016" observedRunningTime="2025-12-02 10:35:21.616634793 +0000 UTC m=+2013.988408967" watchObservedRunningTime="2025-12-02 10:35:21.618546005 +0000 UTC m=+2013.990320179" Dec 02 10:35:29 crc kubenswrapper[4685]: I1202 10:35:29.671989 4685 generic.go:334] "Generic (PLEG): container finished" podID="d47b11b2-65de-45ac-9395-81ccca27d279" containerID="7cc6a9f496da9215154c175e172a4bb14d06ce72f37a17d93b46fb9797354b9a" exitCode=0 Dec 02 10:35:29 crc kubenswrapper[4685]: I1202 10:35:29.672059 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" 
event={"ID":"d47b11b2-65de-45ac-9395-81ccca27d279","Type":"ContainerDied","Data":"7cc6a9f496da9215154c175e172a4bb14d06ce72f37a17d93b46fb9797354b9a"} Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.191379 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.310949 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-ssh-key-openstack-edpm-ipam\") pod \"d47b11b2-65de-45ac-9395-81ccca27d279\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.311039 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5j6dc\" (UniqueName: \"kubernetes.io/projected/d47b11b2-65de-45ac-9395-81ccca27d279-kube-api-access-5j6dc\") pod \"d47b11b2-65de-45ac-9395-81ccca27d279\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.311208 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-inventory-0\") pod \"d47b11b2-65de-45ac-9395-81ccca27d279\" (UID: \"d47b11b2-65de-45ac-9395-81ccca27d279\") " Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.318756 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d47b11b2-65de-45ac-9395-81ccca27d279-kube-api-access-5j6dc" (OuterVolumeSpecName: "kube-api-access-5j6dc") pod "d47b11b2-65de-45ac-9395-81ccca27d279" (UID: "d47b11b2-65de-45ac-9395-81ccca27d279"). InnerVolumeSpecName "kube-api-access-5j6dc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.345821 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "d47b11b2-65de-45ac-9395-81ccca27d279" (UID: "d47b11b2-65de-45ac-9395-81ccca27d279"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.347339 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "d47b11b2-65de-45ac-9395-81ccca27d279" (UID: "d47b11b2-65de-45ac-9395-81ccca27d279"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.413057 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.413097 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5j6dc\" (UniqueName: \"kubernetes.io/projected/d47b11b2-65de-45ac-9395-81ccca27d279-kube-api-access-5j6dc\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.413107 4685 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d47b11b2-65de-45ac-9395-81ccca27d279-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.700994 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" event={"ID":"d47b11b2-65de-45ac-9395-81ccca27d279","Type":"ContainerDied","Data":"2cd85d7f50c9bb3c192eb4b61bca6f1dac8293cf392bae71a80882e2c2f3dc20"} Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.701052 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cd85d7f50c9bb3c192eb4b61bca6f1dac8293cf392bae71a80882e2c2f3dc20" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.701066 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2md5s" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.799145 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp"] Dec 02 10:35:31 crc kubenswrapper[4685]: E1202 10:35:31.799939 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d47b11b2-65de-45ac-9395-81ccca27d279" containerName="ssh-known-hosts-edpm-deployment" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.799964 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="d47b11b2-65de-45ac-9395-81ccca27d279" containerName="ssh-known-hosts-edpm-deployment" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.800217 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="d47b11b2-65de-45ac-9395-81ccca27d279" containerName="ssh-known-hosts-edpm-deployment" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.800992 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.806840 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.807043 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.807231 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.808365 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.811097 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp"] Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.922381 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-lc6xp\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.922446 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-lc6xp\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:31 crc kubenswrapper[4685]: I1202 10:35:31.922551 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzn6p\" (UniqueName: \"kubernetes.io/projected/003cc841-bf83-4ac8-8b56-f6d451bff580-kube-api-access-rzn6p\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-lc6xp\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:32 crc kubenswrapper[4685]: I1202 10:35:32.024138 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzn6p\" (UniqueName: \"kubernetes.io/projected/003cc841-bf83-4ac8-8b56-f6d451bff580-kube-api-access-rzn6p\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-lc6xp\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:32 crc kubenswrapper[4685]: I1202 10:35:32.024320 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-lc6xp\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:32 crc kubenswrapper[4685]: I1202 10:35:32.024352 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-lc6xp\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:32 crc kubenswrapper[4685]: I1202 10:35:32.029705 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-lc6xp\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:32 crc kubenswrapper[4685]: I1202 10:35:32.033225 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-lc6xp\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:32 crc kubenswrapper[4685]: I1202 10:35:32.049374 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzn6p\" (UniqueName: \"kubernetes.io/projected/003cc841-bf83-4ac8-8b56-f6d451bff580-kube-api-access-rzn6p\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-lc6xp\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:32 crc kubenswrapper[4685]: I1202 10:35:32.119116 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:32 crc kubenswrapper[4685]: I1202 10:35:32.685372 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp"] Dec 02 10:35:32 crc kubenswrapper[4685]: I1202 10:35:32.712902 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" event={"ID":"003cc841-bf83-4ac8-8b56-f6d451bff580","Type":"ContainerStarted","Data":"5cbbcde54c3051225f2df860230481ec5708cb6e840fdfb7b4c4fd1158e308be"} Dec 02 10:35:33 crc kubenswrapper[4685]: I1202 10:35:33.737536 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" event={"ID":"003cc841-bf83-4ac8-8b56-f6d451bff580","Type":"ContainerStarted","Data":"5167f95a9ce88f661a83d7419b75a45dca1775901ab1cb99d21da6d91f1a0e8a"} Dec 02 10:35:33 crc kubenswrapper[4685]: I1202 10:35:33.758325 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" podStartSLOduration=2.274754973 podStartE2EDuration="2.75830578s" podCreationTimestamp="2025-12-02 10:35:31 +0000 UTC" firstStartedPulling="2025-12-02 10:35:32.688088411 +0000 UTC m=+2025.059862575" lastFinishedPulling="2025-12-02 10:35:33.171639188 +0000 UTC m=+2025.543413382" observedRunningTime="2025-12-02 10:35:33.758147136 +0000 UTC m=+2026.129921290" watchObservedRunningTime="2025-12-02 10:35:33.75830578 +0000 UTC m=+2026.130079934" Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.148091 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.148788 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" 
podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.148866 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.150211 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d9a0450853ed82258a1cfaa327b75cdfa803fd37df35161147c84e1dbdb1aba0"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.150314 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://d9a0450853ed82258a1cfaa327b75cdfa803fd37df35161147c84e1dbdb1aba0" gracePeriod=600 Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.840850 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="d9a0450853ed82258a1cfaa327b75cdfa803fd37df35161147c84e1dbdb1aba0" exitCode=0 Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.840905 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"d9a0450853ed82258a1cfaa327b75cdfa803fd37df35161147c84e1dbdb1aba0"} Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.841217 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60"} Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.841242 4685 scope.go:117] "RemoveContainer" containerID="c59ef1937cfb57820cab2f1d045c1c48d5cc9c614d78150f32a035cb7fc36afd" Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.844643 4685 generic.go:334] "Generic (PLEG): container finished" podID="003cc841-bf83-4ac8-8b56-f6d451bff580" containerID="5167f95a9ce88f661a83d7419b75a45dca1775901ab1cb99d21da6d91f1a0e8a" exitCode=0 Dec 02 10:35:42 crc kubenswrapper[4685]: I1202 10:35:42.844734 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" event={"ID":"003cc841-bf83-4ac8-8b56-f6d451bff580","Type":"ContainerDied","Data":"5167f95a9ce88f661a83d7419b75a45dca1775901ab1cb99d21da6d91f1a0e8a"} Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.330035 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.470121 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-ssh-key\") pod \"003cc841-bf83-4ac8-8b56-f6d451bff580\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.470213 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzn6p\" (UniqueName: \"kubernetes.io/projected/003cc841-bf83-4ac8-8b56-f6d451bff580-kube-api-access-rzn6p\") pod \"003cc841-bf83-4ac8-8b56-f6d451bff580\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.470263 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-inventory\") pod \"003cc841-bf83-4ac8-8b56-f6d451bff580\" (UID: \"003cc841-bf83-4ac8-8b56-f6d451bff580\") " Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.486263 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/003cc841-bf83-4ac8-8b56-f6d451bff580-kube-api-access-rzn6p" (OuterVolumeSpecName: "kube-api-access-rzn6p") pod "003cc841-bf83-4ac8-8b56-f6d451bff580" (UID: "003cc841-bf83-4ac8-8b56-f6d451bff580"). InnerVolumeSpecName "kube-api-access-rzn6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.504537 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "003cc841-bf83-4ac8-8b56-f6d451bff580" (UID: "003cc841-bf83-4ac8-8b56-f6d451bff580"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.511656 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-inventory" (OuterVolumeSpecName: "inventory") pod "003cc841-bf83-4ac8-8b56-f6d451bff580" (UID: "003cc841-bf83-4ac8-8b56-f6d451bff580"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.571914 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzn6p\" (UniqueName: \"kubernetes.io/projected/003cc841-bf83-4ac8-8b56-f6d451bff580-kube-api-access-rzn6p\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.572104 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.572163 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/003cc841-bf83-4ac8-8b56-f6d451bff580-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.871647 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" event={"ID":"003cc841-bf83-4ac8-8b56-f6d451bff580","Type":"ContainerDied","Data":"5cbbcde54c3051225f2df860230481ec5708cb6e840fdfb7b4c4fd1158e308be"} Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.871913 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5cbbcde54c3051225f2df860230481ec5708cb6e840fdfb7b4c4fd1158e308be" Dec 02 10:35:44 crc kubenswrapper[4685]: I1202 10:35:44.871684 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-lc6xp" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.000039 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4"] Dec 02 10:35:45 crc kubenswrapper[4685]: E1202 10:35:45.000630 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="003cc841-bf83-4ac8-8b56-f6d451bff580" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.000662 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="003cc841-bf83-4ac8-8b56-f6d451bff580" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.000977 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="003cc841-bf83-4ac8-8b56-f6d451bff580" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.002007 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.009195 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.009668 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.009746 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.009888 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.013957 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4"] Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.182763 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2dm8\" (UniqueName: \"kubernetes.io/projected/a639c48a-4ce5-45e3-ae7a-22672b81443f-kube-api-access-q2dm8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.182987 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.183371 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.285209 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.285330 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.285419 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2dm8\" (UniqueName: \"kubernetes.io/projected/a639c48a-4ce5-45e3-ae7a-22672b81443f-kube-api-access-q2dm8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4\" (UID: 
\"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.294863 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.296903 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.303550 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2dm8\" (UniqueName: \"kubernetes.io/projected/a639c48a-4ce5-45e3-ae7a-22672b81443f-kube-api-access-q2dm8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.341052 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:45 crc kubenswrapper[4685]: W1202 10:35:45.898484 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda639c48a_4ce5_45e3_ae7a_22672b81443f.slice/crio-1a5cc8b32a0fada5c2ab2367d187a6347f2efd4b6d945a54f0a92632c3898cea WatchSource:0}: Error finding container 1a5cc8b32a0fada5c2ab2367d187a6347f2efd4b6d945a54f0a92632c3898cea: Status 404 returned error can't find the container with id 1a5cc8b32a0fada5c2ab2367d187a6347f2efd4b6d945a54f0a92632c3898cea Dec 02 10:35:45 crc kubenswrapper[4685]: I1202 10:35:45.913942 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4"] Dec 02 10:35:46 crc kubenswrapper[4685]: I1202 10:35:46.893899 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" event={"ID":"a639c48a-4ce5-45e3-ae7a-22672b81443f","Type":"ContainerStarted","Data":"83a90e346656b9f5055dedfc51bfb88203acf0dcd76ffdfff0191c89cd6a2c28"} Dec 02 10:35:46 crc kubenswrapper[4685]: I1202 10:35:46.894486 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" event={"ID":"a639c48a-4ce5-45e3-ae7a-22672b81443f","Type":"ContainerStarted","Data":"1a5cc8b32a0fada5c2ab2367d187a6347f2efd4b6d945a54f0a92632c3898cea"} Dec 02 10:35:46 crc kubenswrapper[4685]: I1202 10:35:46.922975 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" podStartSLOduration=2.49721764 podStartE2EDuration="2.922953493s" podCreationTimestamp="2025-12-02 10:35:44 +0000 UTC" firstStartedPulling="2025-12-02 10:35:45.904783239 +0000 UTC m=+2038.276557393" lastFinishedPulling="2025-12-02 10:35:46.330519092 +0000 UTC m=+2038.702293246" observedRunningTime="2025-12-02 10:35:46.91668865 +0000 UTC m=+2039.288462844" 
watchObservedRunningTime="2025-12-02 10:35:46.922953493 +0000 UTC m=+2039.294727647" Dec 02 10:35:57 crc kubenswrapper[4685]: I1202 10:35:57.002487 4685 generic.go:334] "Generic (PLEG): container finished" podID="a639c48a-4ce5-45e3-ae7a-22672b81443f" containerID="83a90e346656b9f5055dedfc51bfb88203acf0dcd76ffdfff0191c89cd6a2c28" exitCode=0 Dec 02 10:35:57 crc kubenswrapper[4685]: I1202 10:35:57.003169 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" event={"ID":"a639c48a-4ce5-45e3-ae7a-22672b81443f","Type":"ContainerDied","Data":"83a90e346656b9f5055dedfc51bfb88203acf0dcd76ffdfff0191c89cd6a2c28"} Dec 02 10:35:58 crc kubenswrapper[4685]: I1202 10:35:58.466449 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:58 crc kubenswrapper[4685]: I1202 10:35:58.647542 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-ssh-key\") pod \"a639c48a-4ce5-45e3-ae7a-22672b81443f\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " Dec 02 10:35:58 crc kubenswrapper[4685]: I1202 10:35:58.647646 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2dm8\" (UniqueName: \"kubernetes.io/projected/a639c48a-4ce5-45e3-ae7a-22672b81443f-kube-api-access-q2dm8\") pod \"a639c48a-4ce5-45e3-ae7a-22672b81443f\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " Dec 02 10:35:58 crc kubenswrapper[4685]: I1202 10:35:58.647809 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-inventory\") pod \"a639c48a-4ce5-45e3-ae7a-22672b81443f\" (UID: \"a639c48a-4ce5-45e3-ae7a-22672b81443f\") " Dec 02 10:35:58 crc kubenswrapper[4685]: I1202 10:35:58.655818 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a639c48a-4ce5-45e3-ae7a-22672b81443f-kube-api-access-q2dm8" (OuterVolumeSpecName: "kube-api-access-q2dm8") pod "a639c48a-4ce5-45e3-ae7a-22672b81443f" (UID: "a639c48a-4ce5-45e3-ae7a-22672b81443f"). InnerVolumeSpecName "kube-api-access-q2dm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:35:58 crc kubenswrapper[4685]: I1202 10:35:58.676218 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-inventory" (OuterVolumeSpecName: "inventory") pod "a639c48a-4ce5-45e3-ae7a-22672b81443f" (UID: "a639c48a-4ce5-45e3-ae7a-22672b81443f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:35:58 crc kubenswrapper[4685]: I1202 10:35:58.707473 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a639c48a-4ce5-45e3-ae7a-22672b81443f" (UID: "a639c48a-4ce5-45e3-ae7a-22672b81443f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:35:58 crc kubenswrapper[4685]: I1202 10:35:58.750298 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:58 crc kubenswrapper[4685]: I1202 10:35:58.750348 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a639c48a-4ce5-45e3-ae7a-22672b81443f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:58 crc kubenswrapper[4685]: I1202 10:35:58.750366 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2dm8\" (UniqueName: \"kubernetes.io/projected/a639c48a-4ce5-45e3-ae7a-22672b81443f-kube-api-access-q2dm8\") on node \"crc\" DevicePath \"\"" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.028849 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" event={"ID":"a639c48a-4ce5-45e3-ae7a-22672b81443f","Type":"ContainerDied","Data":"1a5cc8b32a0fada5c2ab2367d187a6347f2efd4b6d945a54f0a92632c3898cea"} Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.028889 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a5cc8b32a0fada5c2ab2367d187a6347f2efd4b6d945a54f0a92632c3898cea" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.028949 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.123210 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw"] Dec 02 10:35:59 crc kubenswrapper[4685]: E1202 10:35:59.123704 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a639c48a-4ce5-45e3-ae7a-22672b81443f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.123731 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a639c48a-4ce5-45e3-ae7a-22672b81443f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.124062 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a639c48a-4ce5-45e3-ae7a-22672b81443f" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.124976 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.130165 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.130825 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.130774 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.130806 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.130824 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.130825 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.130837 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.131139 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.147433 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw"] Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.259894 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.259938 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.259986 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260054 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: 
\"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260094 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260152 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260172 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260194 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d57jl\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-kube-api-access-d57jl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260251 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260273 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260291 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" 
(UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260305 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260345 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.260375 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.362382 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.362763 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.362795 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.362832 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d57jl\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-kube-api-access-d57jl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc 
kubenswrapper[4685]: I1202 10:35:59.362889 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.362928 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.362963 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.362994 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.363054 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.363106 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.363179 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.363310 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ssh-key\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.363539 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.363610 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.369931 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.370033 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.370137 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.370322 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.371221 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.372394 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.372422 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.373549 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.373631 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.374012 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.375629 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.376400 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.377210 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.383528 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d57jl\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-kube-api-access-d57jl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.444054 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:35:59 crc kubenswrapper[4685]: I1202 10:35:59.974244 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw"] Dec 02 10:36:00 crc kubenswrapper[4685]: I1202 10:36:00.042149 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" event={"ID":"b8bea067-fabf-4a0b-b873-05104a785c39","Type":"ContainerStarted","Data":"e27c9a85e5054d1d5eebbc9ac77b027953dbf48db1d331c8d1c5ba475e665e6d"} Dec 02 10:36:01 crc kubenswrapper[4685]: I1202 10:36:01.052332 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" event={"ID":"b8bea067-fabf-4a0b-b873-05104a785c39","Type":"ContainerStarted","Data":"a5751fd1c0b0c9255d7322ff385fda9a136d71c381bf67b5d8a1e015d7fe0df6"} Dec 02 10:36:01 crc kubenswrapper[4685]: I1202 10:36:01.070459 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" podStartSLOduration=1.554740571 podStartE2EDuration="2.070437699s" podCreationTimestamp="2025-12-02 10:35:59 +0000 UTC" firstStartedPulling="2025-12-02 10:35:59.984522569 +0000 UTC m=+2052.356296723" lastFinishedPulling="2025-12-02 10:36:00.500219697 +0000 UTC m=+2052.871993851" observedRunningTime="2025-12-02 10:36:01.068612478 +0000 UTC m=+2053.440386652" watchObservedRunningTime="2025-12-02 10:36:01.070437699 +0000 UTC m=+2053.442211853" Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.759003 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mb65r"] Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.761982 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.775658 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mb65r"] Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.882952 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-catalog-content\") pod \"community-operators-mb65r\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.883718 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-utilities\") pod \"community-operators-mb65r\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.883761 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdq95\" (UniqueName: \"kubernetes.io/projected/31e22aff-9df1-4aa8-b88a-73821c2cbf31-kube-api-access-qdq95\") pod \"community-operators-mb65r\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.986389 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-utilities\") pod \"community-operators-mb65r\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.986448 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdq95\" (UniqueName: \"kubernetes.io/projected/31e22aff-9df1-4aa8-b88a-73821c2cbf31-kube-api-access-qdq95\") pod \"community-operators-mb65r\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.986585 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-catalog-content\") pod \"community-operators-mb65r\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.986965 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-utilities\") pod \"community-operators-mb65r\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:15 crc kubenswrapper[4685]: I1202 10:36:15.987282 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-catalog-content\") pod \"community-operators-mb65r\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:16 crc kubenswrapper[4685]: I1202 10:36:16.006350 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qdq95\" (UniqueName: \"kubernetes.io/projected/31e22aff-9df1-4aa8-b88a-73821c2cbf31-kube-api-access-qdq95\") pod \"community-operators-mb65r\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:16 crc kubenswrapper[4685]: I1202 10:36:16.082894 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:16 crc kubenswrapper[4685]: I1202 10:36:16.656101 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mb65r"] Dec 02 10:36:17 crc kubenswrapper[4685]: I1202 10:36:17.224755 4685 generic.go:334] "Generic (PLEG): container finished" podID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerID="495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933" exitCode=0 Dec 02 10:36:17 crc kubenswrapper[4685]: I1202 10:36:17.224878 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mb65r" event={"ID":"31e22aff-9df1-4aa8-b88a-73821c2cbf31","Type":"ContainerDied","Data":"495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933"} Dec 02 10:36:17 crc kubenswrapper[4685]: I1202 10:36:17.225065 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mb65r" event={"ID":"31e22aff-9df1-4aa8-b88a-73821c2cbf31","Type":"ContainerStarted","Data":"79a3ca8e8f99c1857b366fe352889f30d5c5e5ea1f8596fe18a868dbd67921e0"} Dec 02 10:36:19 crc kubenswrapper[4685]: I1202 10:36:19.247208 4685 generic.go:334] "Generic (PLEG): container finished" podID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerID="d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8" exitCode=0 Dec 02 10:36:19 crc kubenswrapper[4685]: I1202 10:36:19.247407 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mb65r" event={"ID":"31e22aff-9df1-4aa8-b88a-73821c2cbf31","Type":"ContainerDied","Data":"d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8"} Dec 02 10:36:20 crc kubenswrapper[4685]: I1202 10:36:20.258438 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mb65r" event={"ID":"31e22aff-9df1-4aa8-b88a-73821c2cbf31","Type":"ContainerStarted","Data":"1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33"} Dec 02 10:36:20 crc kubenswrapper[4685]: I1202 10:36:20.280788 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mb65r" podStartSLOduration=2.858953858 podStartE2EDuration="5.280771665s" podCreationTimestamp="2025-12-02 10:36:15 +0000 UTC" firstStartedPulling="2025-12-02 10:36:17.227352815 +0000 UTC m=+2069.599126979" lastFinishedPulling="2025-12-02 10:36:19.649170622 +0000 UTC m=+2072.020944786" observedRunningTime="2025-12-02 10:36:20.276685473 +0000 UTC m=+2072.648459637" watchObservedRunningTime="2025-12-02 10:36:20.280771665 +0000 UTC m=+2072.652545829" Dec 02 10:36:26 crc kubenswrapper[4685]: I1202 10:36:26.083498 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:26 crc kubenswrapper[4685]: I1202 10:36:26.083980 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:26 crc kubenswrapper[4685]: I1202 10:36:26.135659 4685 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:26 crc kubenswrapper[4685]: I1202 10:36:26.365015 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:27 crc kubenswrapper[4685]: I1202 10:36:27.740671 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mb65r"] Dec 02 10:36:28 crc kubenswrapper[4685]: I1202 10:36:28.338168 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mb65r" podUID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerName="registry-server" containerID="cri-o://1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33" gracePeriod=2 Dec 02 10:36:28 crc kubenswrapper[4685]: I1202 10:36:28.831324 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:28 crc kubenswrapper[4685]: I1202 10:36:28.949292 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-utilities\") pod \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " Dec 02 10:36:28 crc kubenswrapper[4685]: I1202 10:36:28.949819 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-catalog-content\") pod \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " Dec 02 10:36:28 crc kubenswrapper[4685]: I1202 10:36:28.949872 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdq95\" (UniqueName: \"kubernetes.io/projected/31e22aff-9df1-4aa8-b88a-73821c2cbf31-kube-api-access-qdq95\") pod \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\" (UID: \"31e22aff-9df1-4aa8-b88a-73821c2cbf31\") " Dec 02 10:36:28 crc kubenswrapper[4685]: I1202 10:36:28.950581 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-utilities" (OuterVolumeSpecName: "utilities") pod "31e22aff-9df1-4aa8-b88a-73821c2cbf31" (UID: "31e22aff-9df1-4aa8-b88a-73821c2cbf31"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:36:28 crc kubenswrapper[4685]: I1202 10:36:28.959461 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31e22aff-9df1-4aa8-b88a-73821c2cbf31-kube-api-access-qdq95" (OuterVolumeSpecName: "kube-api-access-qdq95") pod "31e22aff-9df1-4aa8-b88a-73821c2cbf31" (UID: "31e22aff-9df1-4aa8-b88a-73821c2cbf31"). InnerVolumeSpecName "kube-api-access-qdq95". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:36:28 crc kubenswrapper[4685]: I1202 10:36:28.998157 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31e22aff-9df1-4aa8-b88a-73821c2cbf31" (UID: "31e22aff-9df1-4aa8-b88a-73821c2cbf31"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.052468 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.052499 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdq95\" (UniqueName: \"kubernetes.io/projected/31e22aff-9df1-4aa8-b88a-73821c2cbf31-kube-api-access-qdq95\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.052510 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31e22aff-9df1-4aa8-b88a-73821c2cbf31-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.351404 4685 generic.go:334] "Generic (PLEG): container finished" podID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerID="1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33" exitCode=0 Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.351446 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mb65r" event={"ID":"31e22aff-9df1-4aa8-b88a-73821c2cbf31","Type":"ContainerDied","Data":"1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33"} Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.351884 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mb65r" event={"ID":"31e22aff-9df1-4aa8-b88a-73821c2cbf31","Type":"ContainerDied","Data":"79a3ca8e8f99c1857b366fe352889f30d5c5e5ea1f8596fe18a868dbd67921e0"} Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.351918 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mb65r" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.351950 4685 scope.go:117] "RemoveContainer" containerID="1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.403492 4685 scope.go:117] "RemoveContainer" containerID="d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.406530 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mb65r"] Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.416580 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mb65r"] Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.425963 4685 scope.go:117] "RemoveContainer" containerID="495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.467985 4685 scope.go:117] "RemoveContainer" containerID="1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33" Dec 02 10:36:29 crc kubenswrapper[4685]: E1202 10:36:29.468689 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33\": container with ID starting with 1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33 not found: ID does not exist" containerID="1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.469656 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33"} err="failed to get container status \"1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33\": rpc error: code = NotFound desc = could not find container \"1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33\": container with ID starting with 1472c3e4293b7bbaf5886ccd2a9082e4a2f2c300a6d6c30422bbf7f697befa33 not found: ID does not exist" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.469686 4685 scope.go:117] "RemoveContainer" containerID="d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8" Dec 02 10:36:29 crc kubenswrapper[4685]: E1202 10:36:29.470175 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8\": container with ID starting with d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8 not found: ID does not exist" containerID="d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.470222 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8"} err="failed to get container status \"d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8\": rpc error: code = NotFound desc = could not find container \"d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8\": container with ID starting with d7bc37acb3ef02e0638a52f8010d001f806dfd99b4be44b3083425bb6fab5dc8 not found: ID does not exist" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.470257 4685 scope.go:117] "RemoveContainer" 
containerID="495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933" Dec 02 10:36:29 crc kubenswrapper[4685]: E1202 10:36:29.470596 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933\": container with ID starting with 495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933 not found: ID does not exist" containerID="495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.470627 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933"} err="failed to get container status \"495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933\": rpc error: code = NotFound desc = could not find container \"495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933\": container with ID starting with 495f1dfef34dd1c75587cbf764cae685846aa52cf9f2412a754716960b8ac933 not found: ID does not exist" Dec 02 10:36:29 crc kubenswrapper[4685]: I1202 10:36:29.913919 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" path="/var/lib/kubelet/pods/31e22aff-9df1-4aa8-b88a-73821c2cbf31/volumes" Dec 02 10:36:41 crc kubenswrapper[4685]: I1202 10:36:41.458362 4685 generic.go:334] "Generic (PLEG): container finished" podID="b8bea067-fabf-4a0b-b873-05104a785c39" containerID="a5751fd1c0b0c9255d7322ff385fda9a136d71c381bf67b5d8a1e015d7fe0df6" exitCode=0 Dec 02 10:36:41 crc kubenswrapper[4685]: I1202 10:36:41.458502 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" event={"ID":"b8bea067-fabf-4a0b-b873-05104a785c39","Type":"ContainerDied","Data":"a5751fd1c0b0c9255d7322ff385fda9a136d71c381bf67b5d8a1e015d7fe0df6"} Dec 02 10:36:42 crc kubenswrapper[4685]: I1202 10:36:42.910130 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.022536 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-repo-setup-combined-ca-bundle\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.022640 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-nova-combined-ca-bundle\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.022704 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-inventory\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.022731 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.022759 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ovn-combined-ca-bundle\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.022782 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d57jl\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-kube-api-access-d57jl\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.022800 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.022870 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-neutron-metadata-combined-ca-bundle\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.022922 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ssh-key\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 
10:36:43.022966 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-ovn-default-certs-0\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.022990 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.023044 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-bootstrap-combined-ca-bundle\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.023100 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-telemetry-combined-ca-bundle\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.023133 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-libvirt-combined-ca-bundle\") pod \"b8bea067-fabf-4a0b-b873-05104a785c39\" (UID: \"b8bea067-fabf-4a0b-b873-05104a785c39\") " Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.029626 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.029691 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.030964 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.031333 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.033214 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.036262 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.036366 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.036631 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-kube-api-access-d57jl" (OuterVolumeSpecName: "kube-api-access-d57jl") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "kube-api-access-d57jl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.037041 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.038148 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.040729 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.041712 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.059438 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-inventory" (OuterVolumeSpecName: "inventory") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.062864 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b8bea067-fabf-4a0b-b873-05104a785c39" (UID: "b8bea067-fabf-4a0b-b873-05104a785c39"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126224 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126257 4685 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126271 4685 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126284 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d57jl\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-kube-api-access-d57jl\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126295 4685 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126306 4685 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126315 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126323 4685 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126333 4685 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8bea067-fabf-4a0b-b873-05104a785c39-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126341 4685 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126351 4685 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126359 4685 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126368 4685 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.126376 4685 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bea067-fabf-4a0b-b873-05104a785c39-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.481767 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" event={"ID":"b8bea067-fabf-4a0b-b873-05104a785c39","Type":"ContainerDied","Data":"e27c9a85e5054d1d5eebbc9ac77b027953dbf48db1d331c8d1c5ba475e665e6d"} Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.481810 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e27c9a85e5054d1d5eebbc9ac77b027953dbf48db1d331c8d1c5ba475e665e6d" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.481885 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.593448 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6"] Dec 02 10:36:43 crc kubenswrapper[4685]: E1202 10:36:43.593889 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerName="extract-content" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.593904 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerName="extract-content" Dec 02 10:36:43 crc kubenswrapper[4685]: E1202 10:36:43.593919 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8bea067-fabf-4a0b-b873-05104a785c39" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.593926 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8bea067-fabf-4a0b-b873-05104a785c39" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 10:36:43 crc kubenswrapper[4685]: E1202 10:36:43.593940 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerName="registry-server" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.593946 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerName="registry-server" Dec 02 10:36:43 crc kubenswrapper[4685]: E1202 10:36:43.593957 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerName="extract-utilities" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.593963 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerName="extract-utilities" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.594128 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="31e22aff-9df1-4aa8-b88a-73821c2cbf31" containerName="registry-server" Dec 02 10:36:43 crc 
kubenswrapper[4685]: I1202 10:36:43.594138 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8bea067-fabf-4a0b-b873-05104a785c39" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.594768 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.597335 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.601694 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.601740 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.601823 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.602002 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.615798 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6"] Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.739024 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trxbl\" (UniqueName: \"kubernetes.io/projected/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-kube-api-access-trxbl\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.739461 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.739500 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.739702 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.739857 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ssh-key\") pod 
\"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.841755 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.841898 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.841965 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trxbl\" (UniqueName: \"kubernetes.io/projected/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-kube-api-access-trxbl\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.842077 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.842131 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.843178 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.845785 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.847480 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 
02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.848109 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.857993 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trxbl\" (UniqueName: \"kubernetes.io/projected/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-kube-api-access-trxbl\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-xd6j6\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:43 crc kubenswrapper[4685]: I1202 10:36:43.911277 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:36:44 crc kubenswrapper[4685]: I1202 10:36:44.449797 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6"] Dec 02 10:36:44 crc kubenswrapper[4685]: I1202 10:36:44.493786 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" event={"ID":"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff","Type":"ContainerStarted","Data":"49033de60363a43ce7df5cf40c5c999f8a0dc41c2896f9476e47bd54ec060252"} Dec 02 10:36:46 crc kubenswrapper[4685]: I1202 10:36:46.516585 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" event={"ID":"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff","Type":"ContainerStarted","Data":"6f90ecd522f4ef5df367b13f7faea82069e5b62566613cd2ffd05f754635f880"} Dec 02 10:36:46 crc kubenswrapper[4685]: I1202 10:36:46.545795 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" podStartSLOduration=2.606295019 podStartE2EDuration="3.545769856s" podCreationTimestamp="2025-12-02 10:36:43 +0000 UTC" firstStartedPulling="2025-12-02 10:36:44.461494877 +0000 UTC m=+2096.833269041" lastFinishedPulling="2025-12-02 10:36:45.400969684 +0000 UTC m=+2097.772743878" observedRunningTime="2025-12-02 10:36:46.536238786 +0000 UTC m=+2098.908012970" watchObservedRunningTime="2025-12-02 10:36:46.545769856 +0000 UTC m=+2098.917544030" Dec 02 10:37:42 crc kubenswrapper[4685]: I1202 10:37:42.147698 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:37:42 crc kubenswrapper[4685]: I1202 10:37:42.148439 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:37:56 crc kubenswrapper[4685]: I1202 10:37:56.142829 4685 generic.go:334] "Generic (PLEG): container finished" podID="56e7d4c2-fb54-4104-ad31-0d90ceafd0ff" containerID="6f90ecd522f4ef5df367b13f7faea82069e5b62566613cd2ffd05f754635f880" exitCode=0 Dec 02 10:37:56 crc 
kubenswrapper[4685]: I1202 10:37:56.142880 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" event={"ID":"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff","Type":"ContainerDied","Data":"6f90ecd522f4ef5df367b13f7faea82069e5b62566613cd2ffd05f754635f880"} Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.569418 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.652728 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-inventory\") pod \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.653081 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovncontroller-config-0\") pod \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.653311 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trxbl\" (UniqueName: \"kubernetes.io/projected/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-kube-api-access-trxbl\") pod \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.653433 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovn-combined-ca-bundle\") pod \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.653471 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ssh-key\") pod \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\" (UID: \"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff\") " Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.658957 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-kube-api-access-trxbl" (OuterVolumeSpecName: "kube-api-access-trxbl") pod "56e7d4c2-fb54-4104-ad31-0d90ceafd0ff" (UID: "56e7d4c2-fb54-4104-ad31-0d90ceafd0ff"). InnerVolumeSpecName "kube-api-access-trxbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.672225 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "56e7d4c2-fb54-4104-ad31-0d90ceafd0ff" (UID: "56e7d4c2-fb54-4104-ad31-0d90ceafd0ff"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.680713 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "56e7d4c2-fb54-4104-ad31-0d90ceafd0ff" (UID: "56e7d4c2-fb54-4104-ad31-0d90ceafd0ff"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.684591 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "56e7d4c2-fb54-4104-ad31-0d90ceafd0ff" (UID: "56e7d4c2-fb54-4104-ad31-0d90ceafd0ff"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.688698 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-inventory" (OuterVolumeSpecName: "inventory") pod "56e7d4c2-fb54-4104-ad31-0d90ceafd0ff" (UID: "56e7d4c2-fb54-4104-ad31-0d90ceafd0ff"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.756508 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.756543 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.756590 4685 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.756606 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trxbl\" (UniqueName: \"kubernetes.io/projected/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-kube-api-access-trxbl\") on node \"crc\" DevicePath \"\"" Dec 02 10:37:57 crc kubenswrapper[4685]: I1202 10:37:57.756619 4685 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56e7d4c2-fb54-4104-ad31-0d90ceafd0ff-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.163031 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" event={"ID":"56e7d4c2-fb54-4104-ad31-0d90ceafd0ff","Type":"ContainerDied","Data":"49033de60363a43ce7df5cf40c5c999f8a0dc41c2896f9476e47bd54ec060252"} Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.163078 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49033de60363a43ce7df5cf40c5c999f8a0dc41c2896f9476e47bd54ec060252" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.163096 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-xd6j6" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.258041 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52"] Dec 02 10:37:58 crc kubenswrapper[4685]: E1202 10:37:58.258471 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e7d4c2-fb54-4104-ad31-0d90ceafd0ff" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.258492 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e7d4c2-fb54-4104-ad31-0d90ceafd0ff" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.258810 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e7d4c2-fb54-4104-ad31-0d90ceafd0ff" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.259469 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.262823 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.263247 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.269596 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.269820 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.277906 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.277992 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.282512 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52"] Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.367033 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.367100 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.367135 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-kpxsh\" (UniqueName: \"kubernetes.io/projected/9fc6409b-b597-4290-9d9d-313fa733ddf7-kube-api-access-kpxsh\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.367288 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.367359 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.367651 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.469489 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.469962 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.472151 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpxsh\" (UniqueName: \"kubernetes.io/projected/9fc6409b-b597-4290-9d9d-313fa733ddf7-kube-api-access-kpxsh\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.472338 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-ovn-metadata-agent-neutron-config-0\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.472937 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.473267 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.476409 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.477210 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.478821 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.479726 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.484215 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.495472 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-kpxsh\" (UniqueName: \"kubernetes.io/projected/9fc6409b-b597-4290-9d9d-313fa733ddf7-kube-api-access-kpxsh\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:58 crc kubenswrapper[4685]: I1202 10:37:58.579200 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:37:59 crc kubenswrapper[4685]: I1202 10:37:59.135352 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52"] Dec 02 10:37:59 crc kubenswrapper[4685]: I1202 10:37:59.173141 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" event={"ID":"9fc6409b-b597-4290-9d9d-313fa733ddf7","Type":"ContainerStarted","Data":"9f83ae117009598bef9522846a9a72195a669f12dc7b0f8a46ca63f2833f9a5d"} Dec 02 10:38:01 crc kubenswrapper[4685]: I1202 10:38:01.197669 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" event={"ID":"9fc6409b-b597-4290-9d9d-313fa733ddf7","Type":"ContainerStarted","Data":"c03defe4cf2afef26b1cd9da7c7c832f0c809bea313784ef74249eaf523d27f1"} Dec 02 10:38:01 crc kubenswrapper[4685]: I1202 10:38:01.221643 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" podStartSLOduration=1.666929525 podStartE2EDuration="3.221627117s" podCreationTimestamp="2025-12-02 10:37:58 +0000 UTC" firstStartedPulling="2025-12-02 10:37:59.148098101 +0000 UTC m=+2171.519872255" lastFinishedPulling="2025-12-02 10:38:00.702795693 +0000 UTC m=+2173.074569847" observedRunningTime="2025-12-02 10:38:01.217145533 +0000 UTC m=+2173.588919697" watchObservedRunningTime="2025-12-02 10:38:01.221627117 +0000 UTC m=+2173.593401281" Dec 02 10:38:12 crc kubenswrapper[4685]: I1202 10:38:12.147608 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:38:12 crc kubenswrapper[4685]: I1202 10:38:12.148230 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:38:42 crc kubenswrapper[4685]: I1202 10:38:42.147646 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:38:42 crc kubenswrapper[4685]: I1202 10:38:42.148184 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Dec 02 10:38:42 crc kubenswrapper[4685]: I1202 10:38:42.148234 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:38:42 crc kubenswrapper[4685]: I1202 10:38:42.148933 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:38:42 crc kubenswrapper[4685]: I1202 10:38:42.148977 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" gracePeriod=600 Dec 02 10:38:42 crc kubenswrapper[4685]: E1202 10:38:42.281831 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:38:42 crc kubenswrapper[4685]: I1202 10:38:42.565031 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" exitCode=0 Dec 02 10:38:42 crc kubenswrapper[4685]: I1202 10:38:42.565077 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60"} Dec 02 10:38:42 crc kubenswrapper[4685]: I1202 10:38:42.565112 4685 scope.go:117] "RemoveContainer" containerID="d9a0450853ed82258a1cfaa327b75cdfa803fd37df35161147c84e1dbdb1aba0" Dec 02 10:38:42 crc kubenswrapper[4685]: I1202 10:38:42.565802 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:38:42 crc kubenswrapper[4685]: E1202 10:38:42.566050 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:38:49 crc kubenswrapper[4685]: I1202 10:38:49.633862 4685 generic.go:334] "Generic (PLEG): container finished" podID="9fc6409b-b597-4290-9d9d-313fa733ddf7" containerID="c03defe4cf2afef26b1cd9da7c7c832f0c809bea313784ef74249eaf523d27f1" exitCode=0 Dec 02 10:38:49 crc kubenswrapper[4685]: I1202 10:38:49.633907 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" 
event={"ID":"9fc6409b-b597-4290-9d9d-313fa733ddf7","Type":"ContainerDied","Data":"c03defe4cf2afef26b1cd9da7c7c832f0c809bea313784ef74249eaf523d27f1"} Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.075987 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.110427 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-inventory\") pod \"9fc6409b-b597-4290-9d9d-313fa733ddf7\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.110476 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-ssh-key\") pod \"9fc6409b-b597-4290-9d9d-313fa733ddf7\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.110528 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpxsh\" (UniqueName: \"kubernetes.io/projected/9fc6409b-b597-4290-9d9d-313fa733ddf7-kube-api-access-kpxsh\") pod \"9fc6409b-b597-4290-9d9d-313fa733ddf7\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.110665 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-nova-metadata-neutron-config-0\") pod \"9fc6409b-b597-4290-9d9d-313fa733ddf7\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.110738 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-metadata-combined-ca-bundle\") pod \"9fc6409b-b597-4290-9d9d-313fa733ddf7\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.110839 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"9fc6409b-b597-4290-9d9d-313fa733ddf7\" (UID: \"9fc6409b-b597-4290-9d9d-313fa733ddf7\") " Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.118485 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fc6409b-b597-4290-9d9d-313fa733ddf7-kube-api-access-kpxsh" (OuterVolumeSpecName: "kube-api-access-kpxsh") pod "9fc6409b-b597-4290-9d9d-313fa733ddf7" (UID: "9fc6409b-b597-4290-9d9d-313fa733ddf7"). InnerVolumeSpecName "kube-api-access-kpxsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.146100 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "9fc6409b-b597-4290-9d9d-313fa733ddf7" (UID: "9fc6409b-b597-4290-9d9d-313fa733ddf7"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.157789 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9fc6409b-b597-4290-9d9d-313fa733ddf7" (UID: "9fc6409b-b597-4290-9d9d-313fa733ddf7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.168089 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-inventory" (OuterVolumeSpecName: "inventory") pod "9fc6409b-b597-4290-9d9d-313fa733ddf7" (UID: "9fc6409b-b597-4290-9d9d-313fa733ddf7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.171222 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "9fc6409b-b597-4290-9d9d-313fa733ddf7" (UID: "9fc6409b-b597-4290-9d9d-313fa733ddf7"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.175374 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "9fc6409b-b597-4290-9d9d-313fa733ddf7" (UID: "9fc6409b-b597-4290-9d9d-313fa733ddf7"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.213315 4685 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.213359 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.213372 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.213384 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpxsh\" (UniqueName: \"kubernetes.io/projected/9fc6409b-b597-4290-9d9d-313fa733ddf7-kube-api-access-kpxsh\") on node \"crc\" DevicePath \"\"" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.213399 4685 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.213411 4685 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fc6409b-b597-4290-9d9d-313fa733ddf7-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.657244 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.657142 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52" event={"ID":"9fc6409b-b597-4290-9d9d-313fa733ddf7","Type":"ContainerDied","Data":"9f83ae117009598bef9522846a9a72195a669f12dc7b0f8a46ca63f2833f9a5d"} Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.667777 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f83ae117009598bef9522846a9a72195a669f12dc7b0f8a46ca63f2833f9a5d" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.767718 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm"] Dec 02 10:38:51 crc kubenswrapper[4685]: E1202 10:38:51.768096 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fc6409b-b597-4290-9d9d-313fa733ddf7" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.768112 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fc6409b-b597-4290-9d9d-313fa733ddf7" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.768290 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fc6409b-b597-4290-9d9d-313fa733ddf7" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.768906 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.772755 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.772902 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.773773 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.773878 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.776600 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.794877 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm"] Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.823956 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.824008 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.824086 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.824142 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22rxx\" (UniqueName: \"kubernetes.io/projected/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-kube-api-access-22rxx\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.824237 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.926018 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.926097 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22rxx\" (UniqueName: \"kubernetes.io/projected/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-kube-api-access-22rxx\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.926187 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.926298 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.926320 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.930062 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.930190 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.931285 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.931816 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-ssh-key\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:51 crc kubenswrapper[4685]: I1202 10:38:51.944004 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22rxx\" (UniqueName: \"kubernetes.io/projected/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-kube-api-access-22rxx\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:52 crc kubenswrapper[4685]: I1202 10:38:52.086621 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:38:52 crc kubenswrapper[4685]: I1202 10:38:52.723453 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm"] Dec 02 10:38:53 crc kubenswrapper[4685]: I1202 10:38:53.723740 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" event={"ID":"5a0df3ff-af5b-4aa9-b108-cc65a1f43571","Type":"ContainerStarted","Data":"9c42730edfc041c32e59b7128097b57fc787d8ccebd331d49bc023bff7d17fca"} Dec 02 10:38:53 crc kubenswrapper[4685]: I1202 10:38:53.724034 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" event={"ID":"5a0df3ff-af5b-4aa9-b108-cc65a1f43571","Type":"ContainerStarted","Data":"a0ec1963b8bcc56f7320193e1803436d652419f97934d6b1d22cd53515c177e1"} Dec 02 10:38:54 crc kubenswrapper[4685]: I1202 10:38:54.900002 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:38:54 crc kubenswrapper[4685]: E1202 10:38:54.900511 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:39:05 crc kubenswrapper[4685]: I1202 10:39:05.900950 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:39:05 crc kubenswrapper[4685]: E1202 10:39:05.901884 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:39:19 crc kubenswrapper[4685]: I1202 10:39:19.899948 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:39:19 crc kubenswrapper[4685]: E1202 10:39:19.900720 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:39:31 crc kubenswrapper[4685]: I1202 10:39:31.900215 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:39:31 crc kubenswrapper[4685]: E1202 10:39:31.900951 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:39:42 crc kubenswrapper[4685]: I1202 10:39:42.899370 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:39:42 crc kubenswrapper[4685]: E1202 10:39:42.900874 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:39:53 crc kubenswrapper[4685]: I1202 10:39:53.901061 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:39:53 crc kubenswrapper[4685]: E1202 10:39:53.901650 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:40:08 crc kubenswrapper[4685]: I1202 10:40:08.901248 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:40:08 crc kubenswrapper[4685]: E1202 10:40:08.902026 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:40:23 crc kubenswrapper[4685]: I1202 10:40:23.900074 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:40:23 crc kubenswrapper[4685]: E1202 10:40:23.901240 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" 
podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:40:34 crc kubenswrapper[4685]: I1202 10:40:34.900717 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:40:34 crc kubenswrapper[4685]: E1202 10:40:34.901517 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:40:48 crc kubenswrapper[4685]: I1202 10:40:48.899793 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:40:48 crc kubenswrapper[4685]: E1202 10:40:48.900787 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:41:01 crc kubenswrapper[4685]: I1202 10:41:01.899397 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:41:01 crc kubenswrapper[4685]: E1202 10:41:01.901666 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:41:16 crc kubenswrapper[4685]: I1202 10:41:16.900535 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:41:16 crc kubenswrapper[4685]: E1202 10:41:16.901375 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:41:28 crc kubenswrapper[4685]: I1202 10:41:28.900227 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:41:28 crc kubenswrapper[4685]: E1202 10:41:28.900943 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:41:41 crc kubenswrapper[4685]: I1202 10:41:41.899740 4685 scope.go:117] "RemoveContainer" 
containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:41:41 crc kubenswrapper[4685]: E1202 10:41:41.900457 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:41:52 crc kubenswrapper[4685]: I1202 10:41:52.899202 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:41:52 crc kubenswrapper[4685]: E1202 10:41:52.901033 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:42:05 crc kubenswrapper[4685]: I1202 10:42:05.900289 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:42:05 crc kubenswrapper[4685]: E1202 10:42:05.901299 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:42:19 crc kubenswrapper[4685]: I1202 10:42:19.900967 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:42:19 crc kubenswrapper[4685]: E1202 10:42:19.901722 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:42:34 crc kubenswrapper[4685]: I1202 10:42:34.900028 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:42:34 crc kubenswrapper[4685]: E1202 10:42:34.901079 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:42:45 crc kubenswrapper[4685]: I1202 10:42:45.902600 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:42:45 crc kubenswrapper[4685]: E1202 10:42:45.903747 4685 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:43:00 crc kubenswrapper[4685]: I1202 10:43:00.900334 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:43:00 crc kubenswrapper[4685]: E1202 10:43:00.901084 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:43:14 crc kubenswrapper[4685]: I1202 10:43:14.900022 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:43:14 crc kubenswrapper[4685]: E1202 10:43:14.900803 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.320141 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" podStartSLOduration=265.769484572 podStartE2EDuration="4m26.320126593s" podCreationTimestamp="2025-12-02 10:38:51 +0000 UTC" firstStartedPulling="2025-12-02 10:38:52.761754945 +0000 UTC m=+2225.133529099" lastFinishedPulling="2025-12-02 10:38:53.312396966 +0000 UTC m=+2225.684171120" observedRunningTime="2025-12-02 10:38:53.744647677 +0000 UTC m=+2226.116421851" watchObservedRunningTime="2025-12-02 10:43:17.320126593 +0000 UTC m=+2489.691900747" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.320773 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8w8bv"] Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.327267 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.361176 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8w8bv"] Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.442885 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqh72\" (UniqueName: \"kubernetes.io/projected/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-kube-api-access-dqh72\") pod \"redhat-marketplace-8w8bv\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.442994 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-catalog-content\") pod \"redhat-marketplace-8w8bv\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.443026 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-utilities\") pod \"redhat-marketplace-8w8bv\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.544596 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqh72\" (UniqueName: \"kubernetes.io/projected/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-kube-api-access-dqh72\") pod \"redhat-marketplace-8w8bv\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.544699 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-catalog-content\") pod \"redhat-marketplace-8w8bv\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.544733 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-utilities\") pod \"redhat-marketplace-8w8bv\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.545346 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-utilities\") pod \"redhat-marketplace-8w8bv\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.545420 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-catalog-content\") pod \"redhat-marketplace-8w8bv\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.564950 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-dqh72\" (UniqueName: \"kubernetes.io/projected/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-kube-api-access-dqh72\") pod \"redhat-marketplace-8w8bv\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:17 crc kubenswrapper[4685]: I1202 10:43:17.656953 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:18 crc kubenswrapper[4685]: I1202 10:43:18.128719 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8w8bv"] Dec 02 10:43:18 crc kubenswrapper[4685]: I1202 10:43:18.308818 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8w8bv" event={"ID":"a4e9bde0-13ff-48b9-86ee-ac6311ca0375","Type":"ContainerStarted","Data":"829c5ffbdbcb3512ce635811d6935e12294d38272965c14671c1bbe1dcfeb2f8"} Dec 02 10:43:19 crc kubenswrapper[4685]: I1202 10:43:19.319219 4685 generic.go:334] "Generic (PLEG): container finished" podID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerID="ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65" exitCode=0 Dec 02 10:43:19 crc kubenswrapper[4685]: I1202 10:43:19.319267 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8w8bv" event={"ID":"a4e9bde0-13ff-48b9-86ee-ac6311ca0375","Type":"ContainerDied","Data":"ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65"} Dec 02 10:43:19 crc kubenswrapper[4685]: I1202 10:43:19.322007 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 10:43:20 crc kubenswrapper[4685]: I1202 10:43:20.328762 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8w8bv" event={"ID":"a4e9bde0-13ff-48b9-86ee-ac6311ca0375","Type":"ContainerStarted","Data":"3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9"} Dec 02 10:43:21 crc kubenswrapper[4685]: I1202 10:43:21.340110 4685 generic.go:334] "Generic (PLEG): container finished" podID="5a0df3ff-af5b-4aa9-b108-cc65a1f43571" containerID="9c42730edfc041c32e59b7128097b57fc787d8ccebd331d49bc023bff7d17fca" exitCode=0 Dec 02 10:43:21 crc kubenswrapper[4685]: I1202 10:43:21.340216 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" event={"ID":"5a0df3ff-af5b-4aa9-b108-cc65a1f43571","Type":"ContainerDied","Data":"9c42730edfc041c32e59b7128097b57fc787d8ccebd331d49bc023bff7d17fca"} Dec 02 10:43:21 crc kubenswrapper[4685]: I1202 10:43:21.344406 4685 generic.go:334] "Generic (PLEG): container finished" podID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerID="3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9" exitCode=0 Dec 02 10:43:21 crc kubenswrapper[4685]: I1202 10:43:21.344467 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8w8bv" event={"ID":"a4e9bde0-13ff-48b9-86ee-ac6311ca0375","Type":"ContainerDied","Data":"3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9"} Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.363710 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8w8bv" event={"ID":"a4e9bde0-13ff-48b9-86ee-ac6311ca0375","Type":"ContainerStarted","Data":"7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d"} Dec 02 10:43:22 crc 
kubenswrapper[4685]: I1202 10:43:22.391921 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8w8bv" podStartSLOduration=2.889094809 podStartE2EDuration="5.391902528s" podCreationTimestamp="2025-12-02 10:43:17 +0000 UTC" firstStartedPulling="2025-12-02 10:43:19.321616605 +0000 UTC m=+2491.693390769" lastFinishedPulling="2025-12-02 10:43:21.824424334 +0000 UTC m=+2494.196198488" observedRunningTime="2025-12-02 10:43:22.38798657 +0000 UTC m=+2494.759760764" watchObservedRunningTime="2025-12-02 10:43:22.391902528 +0000 UTC m=+2494.763676682" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.760261 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.855181 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-ssh-key\") pod \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.855273 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-combined-ca-bundle\") pod \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.855402 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22rxx\" (UniqueName: \"kubernetes.io/projected/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-kube-api-access-22rxx\") pod \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.855519 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-secret-0\") pod \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.855584 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-inventory\") pod \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\" (UID: \"5a0df3ff-af5b-4aa9-b108-cc65a1f43571\") " Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.876266 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "5a0df3ff-af5b-4aa9-b108-cc65a1f43571" (UID: "5a0df3ff-af5b-4aa9-b108-cc65a1f43571"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.877394 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-kube-api-access-22rxx" (OuterVolumeSpecName: "kube-api-access-22rxx") pod "5a0df3ff-af5b-4aa9-b108-cc65a1f43571" (UID: "5a0df3ff-af5b-4aa9-b108-cc65a1f43571"). InnerVolumeSpecName "kube-api-access-22rxx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.889801 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-inventory" (OuterVolumeSpecName: "inventory") pod "5a0df3ff-af5b-4aa9-b108-cc65a1f43571" (UID: "5a0df3ff-af5b-4aa9-b108-cc65a1f43571"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.892739 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5a0df3ff-af5b-4aa9-b108-cc65a1f43571" (UID: "5a0df3ff-af5b-4aa9-b108-cc65a1f43571"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.908268 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "5a0df3ff-af5b-4aa9-b108-cc65a1f43571" (UID: "5a0df3ff-af5b-4aa9-b108-cc65a1f43571"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.957409 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22rxx\" (UniqueName: \"kubernetes.io/projected/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-kube-api-access-22rxx\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.957445 4685 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.957457 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.957469 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:22 crc kubenswrapper[4685]: I1202 10:43:22.957479 4685 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a0df3ff-af5b-4aa9-b108-cc65a1f43571-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.377248 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.391907 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm" event={"ID":"5a0df3ff-af5b-4aa9-b108-cc65a1f43571","Type":"ContainerDied","Data":"a0ec1963b8bcc56f7320193e1803436d652419f97934d6b1d22cd53515c177e1"} Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.392170 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0ec1963b8bcc56f7320193e1803436d652419f97934d6b1d22cd53515c177e1" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.573180 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh"] Dec 02 10:43:23 crc kubenswrapper[4685]: E1202 10:43:23.574089 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a0df3ff-af5b-4aa9-b108-cc65a1f43571" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.574173 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a0df3ff-af5b-4aa9-b108-cc65a1f43571" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.574447 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a0df3ff-af5b-4aa9-b108-cc65a1f43571" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.575250 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.577824 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.577907 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.578441 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.578746 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.578939 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.585764 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.585893 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.603418 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh"] Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.671308 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.671377 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.671422 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.671447 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.671504 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.671526 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.671627 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.671661 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5wgq\" (UniqueName: \"kubernetes.io/projected/ab586931-b7c5-450d-831c-ca05dbb865ad-kube-api-access-q5wgq\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.671692 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-0\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.773901 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.773962 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.774090 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.774114 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5wgq\" (UniqueName: \"kubernetes.io/projected/ab586931-b7c5-450d-831c-ca05dbb865ad-kube-api-access-q5wgq\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.774148 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.774219 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.774254 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.774327 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: 
\"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.774362 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.775646 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.779544 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.779857 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.780045 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.780440 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.780677 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.780895 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.782787 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.793404 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5wgq\" (UniqueName: \"kubernetes.io/projected/ab586931-b7c5-450d-831c-ca05dbb865ad-kube-api-access-q5wgq\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t8bqh\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:23 crc kubenswrapper[4685]: I1202 10:43:23.899009 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:43:24 crc kubenswrapper[4685]: I1202 10:43:24.436538 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh"] Dec 02 10:43:25 crc kubenswrapper[4685]: I1202 10:43:25.398939 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" event={"ID":"ab586931-b7c5-450d-831c-ca05dbb865ad","Type":"ContainerStarted","Data":"8ddcccf015f18dad10680f314f84f4bf706ca1543869864693b0875cd40721be"} Dec 02 10:43:25 crc kubenswrapper[4685]: I1202 10:43:25.399487 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" event={"ID":"ab586931-b7c5-450d-831c-ca05dbb865ad","Type":"ContainerStarted","Data":"f753d26150caf4e7e550d4df8ba32768f4affa36b4922e5a5d25e944d733dac0"} Dec 02 10:43:25 crc kubenswrapper[4685]: I1202 10:43:25.424122 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" podStartSLOduration=1.831656127 podStartE2EDuration="2.424088321s" podCreationTimestamp="2025-12-02 10:43:23 +0000 UTC" firstStartedPulling="2025-12-02 10:43:24.445529282 +0000 UTC m=+2496.817303446" lastFinishedPulling="2025-12-02 10:43:25.037961486 +0000 UTC m=+2497.409735640" observedRunningTime="2025-12-02 10:43:25.417473351 +0000 UTC m=+2497.789247515" watchObservedRunningTime="2025-12-02 10:43:25.424088321 +0000 UTC m=+2497.795862485" Dec 02 10:43:27 crc kubenswrapper[4685]: I1202 10:43:27.658051 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:27 crc kubenswrapper[4685]: I1202 10:43:27.658408 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:27 crc kubenswrapper[4685]: I1202 10:43:27.710723 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:27 crc kubenswrapper[4685]: I1202 10:43:27.906642 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:43:27 crc kubenswrapper[4685]: E1202 10:43:27.906944 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:43:28 crc kubenswrapper[4685]: I1202 10:43:28.484076 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:28 crc kubenswrapper[4685]: I1202 10:43:28.562716 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8w8bv"] Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.361263 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gdqrx"] Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.363872 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.386623 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gdqrx"] Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.442590 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8w8bv" podUID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerName="registry-server" containerID="cri-o://7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d" gracePeriod=2 Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.511216 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8h9k\" (UniqueName: \"kubernetes.io/projected/dcd2d110-4b44-4c3e-992c-f833682bd7c3-kube-api-access-n8h9k\") pod \"redhat-operators-gdqrx\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.511280 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-utilities\") pod \"redhat-operators-gdqrx\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.511392 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-catalog-content\") pod \"redhat-operators-gdqrx\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.613891 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8h9k\" (UniqueName: \"kubernetes.io/projected/dcd2d110-4b44-4c3e-992c-f833682bd7c3-kube-api-access-n8h9k\") pod \"redhat-operators-gdqrx\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.614404 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-utilities\") pod \"redhat-operators-gdqrx\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 
10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.614501 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-catalog-content\") pod \"redhat-operators-gdqrx\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.614917 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-utilities\") pod \"redhat-operators-gdqrx\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.614923 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-catalog-content\") pod \"redhat-operators-gdqrx\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.647989 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8h9k\" (UniqueName: \"kubernetes.io/projected/dcd2d110-4b44-4c3e-992c-f833682bd7c3-kube-api-access-n8h9k\") pod \"redhat-operators-gdqrx\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.697958 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:30 crc kubenswrapper[4685]: I1202 10:43:30.956201 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.020885 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-catalog-content\") pod \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.020975 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-utilities\") pod \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.021078 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqh72\" (UniqueName: \"kubernetes.io/projected/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-kube-api-access-dqh72\") pod \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\" (UID: \"a4e9bde0-13ff-48b9-86ee-ac6311ca0375\") " Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.023515 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-utilities" (OuterVolumeSpecName: "utilities") pod "a4e9bde0-13ff-48b9-86ee-ac6311ca0375" (UID: "a4e9bde0-13ff-48b9-86ee-ac6311ca0375"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.028115 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-kube-api-access-dqh72" (OuterVolumeSpecName: "kube-api-access-dqh72") pod "a4e9bde0-13ff-48b9-86ee-ac6311ca0375" (UID: "a4e9bde0-13ff-48b9-86ee-ac6311ca0375"). InnerVolumeSpecName "kube-api-access-dqh72". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.054162 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4e9bde0-13ff-48b9-86ee-ac6311ca0375" (UID: "a4e9bde0-13ff-48b9-86ee-ac6311ca0375"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.123546 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.123602 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.123617 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqh72\" (UniqueName: \"kubernetes.io/projected/a4e9bde0-13ff-48b9-86ee-ac6311ca0375-kube-api-access-dqh72\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.272309 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gdqrx"] Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.454830 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gdqrx" event={"ID":"dcd2d110-4b44-4c3e-992c-f833682bd7c3","Type":"ContainerStarted","Data":"baf674d952f18d5fb0fd3883ee290e0ae4cf6523a247b0c2b91b0aa192a2eafb"} Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.458883 4685 generic.go:334] "Generic (PLEG): container finished" podID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerID="7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d" exitCode=0 Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.458929 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8w8bv" event={"ID":"a4e9bde0-13ff-48b9-86ee-ac6311ca0375","Type":"ContainerDied","Data":"7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d"} Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.458952 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8w8bv" event={"ID":"a4e9bde0-13ff-48b9-86ee-ac6311ca0375","Type":"ContainerDied","Data":"829c5ffbdbcb3512ce635811d6935e12294d38272965c14671c1bbe1dcfeb2f8"} Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.458968 4685 scope.go:117] "RemoveContainer" containerID="7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.459113 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8w8bv" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.498133 4685 scope.go:117] "RemoveContainer" containerID="3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.508644 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8w8bv"] Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.515770 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8w8bv"] Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.530668 4685 scope.go:117] "RemoveContainer" containerID="ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.654610 4685 scope.go:117] "RemoveContainer" containerID="7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d" Dec 02 10:43:31 crc kubenswrapper[4685]: E1202 10:43:31.655346 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d\": container with ID starting with 7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d not found: ID does not exist" containerID="7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.655391 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d"} err="failed to get container status \"7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d\": rpc error: code = NotFound desc = could not find container \"7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d\": container with ID starting with 7d357312351585600113008eb31e6ba3e040eccc37392bdc1442161e67ea981d not found: ID does not exist" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.655417 4685 scope.go:117] "RemoveContainer" containerID="3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9" Dec 02 10:43:31 crc kubenswrapper[4685]: E1202 10:43:31.655912 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9\": container with ID starting with 3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9 not found: ID does not exist" containerID="3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.655956 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9"} err="failed to get container status \"3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9\": rpc error: code = NotFound desc = could not find container \"3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9\": container with ID starting with 3f7e25175da3b21fcd21598af31571454a704950efb7d828b8ef4a112e0e89a9 not found: ID does not exist" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.655983 4685 scope.go:117] "RemoveContainer" containerID="ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65" Dec 02 10:43:31 crc kubenswrapper[4685]: E1202 10:43:31.656298 4685 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65\": container with ID starting with ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65 not found: ID does not exist" containerID="ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.656329 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65"} err="failed to get container status \"ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65\": rpc error: code = NotFound desc = could not find container \"ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65\": container with ID starting with ce156fa2b31b3a1cf767894ed61ba2b65373e519e0552593c189b252132add65 not found: ID does not exist" Dec 02 10:43:31 crc kubenswrapper[4685]: I1202 10:43:31.910847 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" path="/var/lib/kubelet/pods/a4e9bde0-13ff-48b9-86ee-ac6311ca0375/volumes" Dec 02 10:43:32 crc kubenswrapper[4685]: I1202 10:43:32.472838 4685 generic.go:334] "Generic (PLEG): container finished" podID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerID="e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd" exitCode=0 Dec 02 10:43:32 crc kubenswrapper[4685]: I1202 10:43:32.473592 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gdqrx" event={"ID":"dcd2d110-4b44-4c3e-992c-f833682bd7c3","Type":"ContainerDied","Data":"e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd"} Dec 02 10:43:33 crc kubenswrapper[4685]: I1202 10:43:33.482304 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gdqrx" event={"ID":"dcd2d110-4b44-4c3e-992c-f833682bd7c3","Type":"ContainerStarted","Data":"8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a"} Dec 02 10:43:37 crc kubenswrapper[4685]: I1202 10:43:37.528253 4685 generic.go:334] "Generic (PLEG): container finished" podID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerID="8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a" exitCode=0 Dec 02 10:43:37 crc kubenswrapper[4685]: I1202 10:43:37.528316 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gdqrx" event={"ID":"dcd2d110-4b44-4c3e-992c-f833682bd7c3","Type":"ContainerDied","Data":"8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a"} Dec 02 10:43:39 crc kubenswrapper[4685]: I1202 10:43:39.549287 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gdqrx" event={"ID":"dcd2d110-4b44-4c3e-992c-f833682bd7c3","Type":"ContainerStarted","Data":"4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec"} Dec 02 10:43:39 crc kubenswrapper[4685]: I1202 10:43:39.568347 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gdqrx" podStartSLOduration=3.623433522 podStartE2EDuration="9.568324759s" podCreationTimestamp="2025-12-02 10:43:30 +0000 UTC" firstStartedPulling="2025-12-02 10:43:32.474946925 +0000 UTC m=+2504.846721079" lastFinishedPulling="2025-12-02 10:43:38.419838142 +0000 UTC m=+2510.791612316" observedRunningTime="2025-12-02 10:43:39.567776594 +0000 UTC m=+2511.939550768" 
watchObservedRunningTime="2025-12-02 10:43:39.568324759 +0000 UTC m=+2511.940098923" Dec 02 10:43:39 crc kubenswrapper[4685]: I1202 10:43:39.900522 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:43:39 crc kubenswrapper[4685]: E1202 10:43:39.901140 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:43:40 crc kubenswrapper[4685]: I1202 10:43:40.698149 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:40 crc kubenswrapper[4685]: I1202 10:43:40.699079 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:41 crc kubenswrapper[4685]: I1202 10:43:41.754673 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gdqrx" podUID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerName="registry-server" probeResult="failure" output=< Dec 02 10:43:41 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Dec 02 10:43:41 crc kubenswrapper[4685]: > Dec 02 10:43:50 crc kubenswrapper[4685]: I1202 10:43:50.765129 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:50 crc kubenswrapper[4685]: I1202 10:43:50.816973 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:51 crc kubenswrapper[4685]: I1202 10:43:51.014452 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gdqrx"] Dec 02 10:43:52 crc kubenswrapper[4685]: I1202 10:43:52.680852 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gdqrx" podUID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerName="registry-server" containerID="cri-o://4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec" gracePeriod=2 Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.135723 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.277954 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8h9k\" (UniqueName: \"kubernetes.io/projected/dcd2d110-4b44-4c3e-992c-f833682bd7c3-kube-api-access-n8h9k\") pod \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.278222 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-utilities\") pod \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.278377 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-catalog-content\") pod \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\" (UID: \"dcd2d110-4b44-4c3e-992c-f833682bd7c3\") " Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.279045 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-utilities" (OuterVolumeSpecName: "utilities") pod "dcd2d110-4b44-4c3e-992c-f833682bd7c3" (UID: "dcd2d110-4b44-4c3e-992c-f833682bd7c3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.280695 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.288472 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcd2d110-4b44-4c3e-992c-f833682bd7c3-kube-api-access-n8h9k" (OuterVolumeSpecName: "kube-api-access-n8h9k") pod "dcd2d110-4b44-4c3e-992c-f833682bd7c3" (UID: "dcd2d110-4b44-4c3e-992c-f833682bd7c3"). InnerVolumeSpecName "kube-api-access-n8h9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.384006 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8h9k\" (UniqueName: \"kubernetes.io/projected/dcd2d110-4b44-4c3e-992c-f833682bd7c3-kube-api-access-n8h9k\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.413079 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dcd2d110-4b44-4c3e-992c-f833682bd7c3" (UID: "dcd2d110-4b44-4c3e-992c-f833682bd7c3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.485669 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcd2d110-4b44-4c3e-992c-f833682bd7c3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.693539 4685 generic.go:334] "Generic (PLEG): container finished" podID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerID="4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec" exitCode=0 Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.693602 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gdqrx" event={"ID":"dcd2d110-4b44-4c3e-992c-f833682bd7c3","Type":"ContainerDied","Data":"4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec"} Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.693627 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gdqrx" event={"ID":"dcd2d110-4b44-4c3e-992c-f833682bd7c3","Type":"ContainerDied","Data":"baf674d952f18d5fb0fd3883ee290e0ae4cf6523a247b0c2b91b0aa192a2eafb"} Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.693646 4685 scope.go:117] "RemoveContainer" containerID="4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.693761 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gdqrx" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.727480 4685 scope.go:117] "RemoveContainer" containerID="8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.735856 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gdqrx"] Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.747212 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gdqrx"] Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.754793 4685 scope.go:117] "RemoveContainer" containerID="e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.801818 4685 scope.go:117] "RemoveContainer" containerID="4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec" Dec 02 10:43:53 crc kubenswrapper[4685]: E1202 10:43:53.802275 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec\": container with ID starting with 4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec not found: ID does not exist" containerID="4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.802323 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec"} err="failed to get container status \"4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec\": rpc error: code = NotFound desc = could not find container \"4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec\": container with ID starting with 4ae011b1c4dad31db16c01672d21401184dceb20a081e00036e116edba6cb7ec not found: ID does not exist" Dec 02 10:43:53 crc 
kubenswrapper[4685]: I1202 10:43:53.802352 4685 scope.go:117] "RemoveContainer" containerID="8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a" Dec 02 10:43:53 crc kubenswrapper[4685]: E1202 10:43:53.802825 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a\": container with ID starting with 8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a not found: ID does not exist" containerID="8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.802858 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a"} err="failed to get container status \"8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a\": rpc error: code = NotFound desc = could not find container \"8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a\": container with ID starting with 8fc29558257ea9178f39523bc1a2af3f66a7def09096a79a63b24ae58f918e6a not found: ID does not exist" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.802878 4685 scope.go:117] "RemoveContainer" containerID="e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd" Dec 02 10:43:53 crc kubenswrapper[4685]: E1202 10:43:53.803108 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd\": container with ID starting with e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd not found: ID does not exist" containerID="e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.803137 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd"} err="failed to get container status \"e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd\": rpc error: code = NotFound desc = could not find container \"e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd\": container with ID starting with e97bbd4f1134c1aa06c70e0e4ba39521c9d9e970299cf190291ab4edd112fbbd not found: ID does not exist" Dec 02 10:43:53 crc kubenswrapper[4685]: I1202 10:43:53.913455 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" path="/var/lib/kubelet/pods/dcd2d110-4b44-4c3e-992c-f833682bd7c3/volumes" Dec 02 10:43:54 crc kubenswrapper[4685]: I1202 10:43:54.900075 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:43:55 crc kubenswrapper[4685]: I1202 10:43:55.714610 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"9255d03204e27cdd2e8bd721f4f8830da2fb1f7433065a8726360aa6e3a277d2"} Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.965222 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xw9wn"] Dec 02 10:44:18 crc kubenswrapper[4685]: E1202 10:44:18.967580 4685 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerName="extract-content" Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.967697 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerName="extract-content" Dec 02 10:44:18 crc kubenswrapper[4685]: E1202 10:44:18.967780 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerName="extract-utilities" Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.967857 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerName="extract-utilities" Dec 02 10:44:18 crc kubenswrapper[4685]: E1202 10:44:18.967942 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerName="registry-server" Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.968025 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerName="registry-server" Dec 02 10:44:18 crc kubenswrapper[4685]: E1202 10:44:18.968105 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerName="registry-server" Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.968184 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerName="registry-server" Dec 02 10:44:18 crc kubenswrapper[4685]: E1202 10:44:18.968275 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerName="extract-utilities" Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.968352 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerName="extract-utilities" Dec 02 10:44:18 crc kubenswrapper[4685]: E1202 10:44:18.968444 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerName="extract-content" Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.968520 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerName="extract-content" Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.968966 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcd2d110-4b44-4c3e-992c-f833682bd7c3" containerName="registry-server" Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.969066 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4e9bde0-13ff-48b9-86ee-ac6311ca0375" containerName="registry-server" Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.973735 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:18 crc kubenswrapper[4685]: I1202 10:44:18.989800 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xw9wn"] Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.083926 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-catalog-content\") pod \"certified-operators-xw9wn\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.083986 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nws84\" (UniqueName: \"kubernetes.io/projected/2239c498-6bbf-404e-9988-4aae783f8547-kube-api-access-nws84\") pod \"certified-operators-xw9wn\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.084059 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-utilities\") pod \"certified-operators-xw9wn\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.185984 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-utilities\") pod \"certified-operators-xw9wn\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.186376 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-utilities\") pod \"certified-operators-xw9wn\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.186602 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-catalog-content\") pod \"certified-operators-xw9wn\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.186850 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-catalog-content\") pod \"certified-operators-xw9wn\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.186903 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nws84\" (UniqueName: \"kubernetes.io/projected/2239c498-6bbf-404e-9988-4aae783f8547-kube-api-access-nws84\") pod \"certified-operators-xw9wn\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.217552 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nws84\" (UniqueName: \"kubernetes.io/projected/2239c498-6bbf-404e-9988-4aae783f8547-kube-api-access-nws84\") pod \"certified-operators-xw9wn\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.308395 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:19 crc kubenswrapper[4685]: I1202 10:44:19.977934 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xw9wn"] Dec 02 10:44:20 crc kubenswrapper[4685]: I1202 10:44:20.974233 4685 generic.go:334] "Generic (PLEG): container finished" podID="2239c498-6bbf-404e-9988-4aae783f8547" containerID="ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55" exitCode=0 Dec 02 10:44:20 crc kubenswrapper[4685]: I1202 10:44:20.974392 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw9wn" event={"ID":"2239c498-6bbf-404e-9988-4aae783f8547","Type":"ContainerDied","Data":"ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55"} Dec 02 10:44:20 crc kubenswrapper[4685]: I1202 10:44:20.974551 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw9wn" event={"ID":"2239c498-6bbf-404e-9988-4aae783f8547","Type":"ContainerStarted","Data":"e5d01dd9c0cb17278b94cc87cf40354fb64d14657021a4de583db7502f05ba93"} Dec 02 10:44:21 crc kubenswrapper[4685]: I1202 10:44:21.984844 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw9wn" event={"ID":"2239c498-6bbf-404e-9988-4aae783f8547","Type":"ContainerStarted","Data":"450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61"} Dec 02 10:44:23 crc kubenswrapper[4685]: I1202 10:44:23.013767 4685 generic.go:334] "Generic (PLEG): container finished" podID="2239c498-6bbf-404e-9988-4aae783f8547" containerID="450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61" exitCode=0 Dec 02 10:44:23 crc kubenswrapper[4685]: I1202 10:44:23.014095 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw9wn" event={"ID":"2239c498-6bbf-404e-9988-4aae783f8547","Type":"ContainerDied","Data":"450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61"} Dec 02 10:44:24 crc kubenswrapper[4685]: I1202 10:44:24.026461 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw9wn" event={"ID":"2239c498-6bbf-404e-9988-4aae783f8547","Type":"ContainerStarted","Data":"96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea"} Dec 02 10:44:24 crc kubenswrapper[4685]: I1202 10:44:24.056215 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xw9wn" podStartSLOduration=3.458644923 podStartE2EDuration="6.056191738s" podCreationTimestamp="2025-12-02 10:44:18 +0000 UTC" firstStartedPulling="2025-12-02 10:44:20.976464617 +0000 UTC m=+2553.348238781" lastFinishedPulling="2025-12-02 10:44:23.574011442 +0000 UTC m=+2555.945785596" observedRunningTime="2025-12-02 10:44:24.043471771 +0000 UTC m=+2556.415245945" watchObservedRunningTime="2025-12-02 10:44:24.056191738 +0000 UTC m=+2556.427965892" Dec 02 10:44:29 crc kubenswrapper[4685]: I1202 10:44:29.308854 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:29 crc kubenswrapper[4685]: I1202 10:44:29.309410 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:29 crc kubenswrapper[4685]: I1202 10:44:29.388397 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:30 crc kubenswrapper[4685]: I1202 10:44:30.143776 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:30 crc kubenswrapper[4685]: I1202 10:44:30.195985 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xw9wn"] Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.112023 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xw9wn" podUID="2239c498-6bbf-404e-9988-4aae783f8547" containerName="registry-server" containerID="cri-o://96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea" gracePeriod=2 Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.530661 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.574601 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-utilities\") pod \"2239c498-6bbf-404e-9988-4aae783f8547\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.574683 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nws84\" (UniqueName: \"kubernetes.io/projected/2239c498-6bbf-404e-9988-4aae783f8547-kube-api-access-nws84\") pod \"2239c498-6bbf-404e-9988-4aae783f8547\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.574711 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-catalog-content\") pod \"2239c498-6bbf-404e-9988-4aae783f8547\" (UID: \"2239c498-6bbf-404e-9988-4aae783f8547\") " Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.575792 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-utilities" (OuterVolumeSpecName: "utilities") pod "2239c498-6bbf-404e-9988-4aae783f8547" (UID: "2239c498-6bbf-404e-9988-4aae783f8547"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.593317 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2239c498-6bbf-404e-9988-4aae783f8547-kube-api-access-nws84" (OuterVolumeSpecName: "kube-api-access-nws84") pod "2239c498-6bbf-404e-9988-4aae783f8547" (UID: "2239c498-6bbf-404e-9988-4aae783f8547"). InnerVolumeSpecName "kube-api-access-nws84". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.636949 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2239c498-6bbf-404e-9988-4aae783f8547" (UID: "2239c498-6bbf-404e-9988-4aae783f8547"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.677645 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.677683 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2239c498-6bbf-404e-9988-4aae783f8547-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:44:32 crc kubenswrapper[4685]: I1202 10:44:32.677697 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nws84\" (UniqueName: \"kubernetes.io/projected/2239c498-6bbf-404e-9988-4aae783f8547-kube-api-access-nws84\") on node \"crc\" DevicePath \"\"" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.121368 4685 generic.go:334] "Generic (PLEG): container finished" podID="2239c498-6bbf-404e-9988-4aae783f8547" containerID="96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea" exitCode=0 Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.121429 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xw9wn" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.121449 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw9wn" event={"ID":"2239c498-6bbf-404e-9988-4aae783f8547","Type":"ContainerDied","Data":"96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea"} Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.121826 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xw9wn" event={"ID":"2239c498-6bbf-404e-9988-4aae783f8547","Type":"ContainerDied","Data":"e5d01dd9c0cb17278b94cc87cf40354fb64d14657021a4de583db7502f05ba93"} Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.121847 4685 scope.go:117] "RemoveContainer" containerID="96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.141056 4685 scope.go:117] "RemoveContainer" containerID="450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.154985 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xw9wn"] Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.169112 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xw9wn"] Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.176241 4685 scope.go:117] "RemoveContainer" containerID="ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.214796 4685 scope.go:117] "RemoveContainer" containerID="96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea" Dec 02 10:44:33 crc kubenswrapper[4685]: E1202 10:44:33.215193 4685 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea\": container with ID starting with 96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea not found: ID does not exist" containerID="96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.215226 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea"} err="failed to get container status \"96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea\": rpc error: code = NotFound desc = could not find container \"96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea\": container with ID starting with 96768fe59b37677dcac36099d1a8f3817868f2927c3646275d5388551f6ca9ea not found: ID does not exist" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.215246 4685 scope.go:117] "RemoveContainer" containerID="450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61" Dec 02 10:44:33 crc kubenswrapper[4685]: E1202 10:44:33.215464 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61\": container with ID starting with 450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61 not found: ID does not exist" containerID="450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.215492 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61"} err="failed to get container status \"450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61\": rpc error: code = NotFound desc = could not find container \"450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61\": container with ID starting with 450c1ee560aa889ea76afd1b78cd24bbb7fed21e4cd799a46e20301ec5207b61 not found: ID does not exist" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.215506 4685 scope.go:117] "RemoveContainer" containerID="ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55" Dec 02 10:44:33 crc kubenswrapper[4685]: E1202 10:44:33.215806 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55\": container with ID starting with ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55 not found: ID does not exist" containerID="ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.215832 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55"} err="failed to get container status \"ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55\": rpc error: code = NotFound desc = could not find container \"ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55\": container with ID starting with ab9d9d3e12181e02ae702530c691521bd3cb79e211595a19771d1e70a3a75e55 not found: ID does not exist" Dec 02 10:44:33 crc kubenswrapper[4685]: I1202 10:44:33.910249 4685 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="2239c498-6bbf-404e-9988-4aae783f8547" path="/var/lib/kubelet/pods/2239c498-6bbf-404e-9988-4aae783f8547/volumes" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.144981 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55"] Dec 02 10:45:00 crc kubenswrapper[4685]: E1202 10:45:00.145993 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2239c498-6bbf-404e-9988-4aae783f8547" containerName="extract-utilities" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.146009 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2239c498-6bbf-404e-9988-4aae783f8547" containerName="extract-utilities" Dec 02 10:45:00 crc kubenswrapper[4685]: E1202 10:45:00.146043 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2239c498-6bbf-404e-9988-4aae783f8547" containerName="registry-server" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.146053 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2239c498-6bbf-404e-9988-4aae783f8547" containerName="registry-server" Dec 02 10:45:00 crc kubenswrapper[4685]: E1202 10:45:00.146087 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2239c498-6bbf-404e-9988-4aae783f8547" containerName="extract-content" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.146094 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="2239c498-6bbf-404e-9988-4aae783f8547" containerName="extract-content" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.146392 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="2239c498-6bbf-404e-9988-4aae783f8547" containerName="registry-server" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.147516 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.149534 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.149791 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.155474 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55"] Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.218680 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bb13d1a2-e090-4d30-9d75-9834ab612b3f-config-volume\") pod \"collect-profiles-29411205-jkz55\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.218724 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bb13d1a2-e090-4d30-9d75-9834ab612b3f-secret-volume\") pod \"collect-profiles-29411205-jkz55\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.218887 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm52p\" (UniqueName: \"kubernetes.io/projected/bb13d1a2-e090-4d30-9d75-9834ab612b3f-kube-api-access-pm52p\") pod \"collect-profiles-29411205-jkz55\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.321046 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bb13d1a2-e090-4d30-9d75-9834ab612b3f-config-volume\") pod \"collect-profiles-29411205-jkz55\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.321108 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bb13d1a2-e090-4d30-9d75-9834ab612b3f-secret-volume\") pod \"collect-profiles-29411205-jkz55\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.321234 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm52p\" (UniqueName: \"kubernetes.io/projected/bb13d1a2-e090-4d30-9d75-9834ab612b3f-kube-api-access-pm52p\") pod \"collect-profiles-29411205-jkz55\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.323763 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bb13d1a2-e090-4d30-9d75-9834ab612b3f-config-volume\") pod 
\"collect-profiles-29411205-jkz55\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.328730 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bb13d1a2-e090-4d30-9d75-9834ab612b3f-secret-volume\") pod \"collect-profiles-29411205-jkz55\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.339374 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm52p\" (UniqueName: \"kubernetes.io/projected/bb13d1a2-e090-4d30-9d75-9834ab612b3f-kube-api-access-pm52p\") pod \"collect-profiles-29411205-jkz55\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.467459 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:00 crc kubenswrapper[4685]: I1202 10:45:00.897760 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55"] Dec 02 10:45:01 crc kubenswrapper[4685]: I1202 10:45:01.385269 4685 generic.go:334] "Generic (PLEG): container finished" podID="bb13d1a2-e090-4d30-9d75-9834ab612b3f" containerID="880bed20b5cfe87fec131fddd03a20900d55c34f5677ce472ff2d15454994ccc" exitCode=0 Dec 02 10:45:01 crc kubenswrapper[4685]: I1202 10:45:01.385311 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" event={"ID":"bb13d1a2-e090-4d30-9d75-9834ab612b3f","Type":"ContainerDied","Data":"880bed20b5cfe87fec131fddd03a20900d55c34f5677ce472ff2d15454994ccc"} Dec 02 10:45:01 crc kubenswrapper[4685]: I1202 10:45:01.385623 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" event={"ID":"bb13d1a2-e090-4d30-9d75-9834ab612b3f","Type":"ContainerStarted","Data":"1d2fae8b30a11610d52e40889ad695dcda08e3044ba55b682d8b35ca4c44449d"} Dec 02 10:45:02 crc kubenswrapper[4685]: I1202 10:45:02.706076 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:02 crc kubenswrapper[4685]: I1202 10:45:02.764447 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bb13d1a2-e090-4d30-9d75-9834ab612b3f-secret-volume\") pod \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " Dec 02 10:45:02 crc kubenswrapper[4685]: I1202 10:45:02.764547 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm52p\" (UniqueName: \"kubernetes.io/projected/bb13d1a2-e090-4d30-9d75-9834ab612b3f-kube-api-access-pm52p\") pod \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " Dec 02 10:45:02 crc kubenswrapper[4685]: I1202 10:45:02.764794 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bb13d1a2-e090-4d30-9d75-9834ab612b3f-config-volume\") pod \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\" (UID: \"bb13d1a2-e090-4d30-9d75-9834ab612b3f\") " Dec 02 10:45:02 crc kubenswrapper[4685]: I1202 10:45:02.766066 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb13d1a2-e090-4d30-9d75-9834ab612b3f-config-volume" (OuterVolumeSpecName: "config-volume") pod "bb13d1a2-e090-4d30-9d75-9834ab612b3f" (UID: "bb13d1a2-e090-4d30-9d75-9834ab612b3f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:45:02 crc kubenswrapper[4685]: I1202 10:45:02.771457 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb13d1a2-e090-4d30-9d75-9834ab612b3f-kube-api-access-pm52p" (OuterVolumeSpecName: "kube-api-access-pm52p") pod "bb13d1a2-e090-4d30-9d75-9834ab612b3f" (UID: "bb13d1a2-e090-4d30-9d75-9834ab612b3f"). InnerVolumeSpecName "kube-api-access-pm52p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:45:02 crc kubenswrapper[4685]: I1202 10:45:02.772015 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb13d1a2-e090-4d30-9d75-9834ab612b3f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "bb13d1a2-e090-4d30-9d75-9834ab612b3f" (UID: "bb13d1a2-e090-4d30-9d75-9834ab612b3f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:45:02 crc kubenswrapper[4685]: I1202 10:45:02.866915 4685 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bb13d1a2-e090-4d30-9d75-9834ab612b3f-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 10:45:02 crc kubenswrapper[4685]: I1202 10:45:02.867161 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm52p\" (UniqueName: \"kubernetes.io/projected/bb13d1a2-e090-4d30-9d75-9834ab612b3f-kube-api-access-pm52p\") on node \"crc\" DevicePath \"\"" Dec 02 10:45:02 crc kubenswrapper[4685]: I1202 10:45:02.867263 4685 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bb13d1a2-e090-4d30-9d75-9834ab612b3f-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 10:45:03 crc kubenswrapper[4685]: I1202 10:45:03.404491 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" event={"ID":"bb13d1a2-e090-4d30-9d75-9834ab612b3f","Type":"ContainerDied","Data":"1d2fae8b30a11610d52e40889ad695dcda08e3044ba55b682d8b35ca4c44449d"} Dec 02 10:45:03 crc kubenswrapper[4685]: I1202 10:45:03.405019 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d2fae8b30a11610d52e40889ad695dcda08e3044ba55b682d8b35ca4c44449d" Dec 02 10:45:03 crc kubenswrapper[4685]: I1202 10:45:03.404546 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411205-jkz55" Dec 02 10:45:03 crc kubenswrapper[4685]: I1202 10:45:03.806613 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76"] Dec 02 10:45:03 crc kubenswrapper[4685]: I1202 10:45:03.821048 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411160-sjs76"] Dec 02 10:45:03 crc kubenswrapper[4685]: I1202 10:45:03.911913 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7def2210-de93-4381-84dd-fe0d507f76b3" path="/var/lib/kubelet/pods/7def2210-de93-4381-84dd-fe0d507f76b3/volumes" Dec 02 10:45:04 crc kubenswrapper[4685]: I1202 10:45:04.485931 4685 scope.go:117] "RemoveContainer" containerID="726fbe8b86c5da91b6b34c6facc006767c92adcaa568cd3d60030a53a1f4d9dd" Dec 02 10:46:12 crc kubenswrapper[4685]: I1202 10:46:12.147624 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:46:12 crc kubenswrapper[4685]: I1202 10:46:12.148194 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:46:17 crc kubenswrapper[4685]: I1202 10:46:17.146163 4685 generic.go:334] "Generic (PLEG): container finished" podID="ab586931-b7c5-450d-831c-ca05dbb865ad" containerID="8ddcccf015f18dad10680f314f84f4bf706ca1543869864693b0875cd40721be" exitCode=0 Dec 02 10:46:17 crc kubenswrapper[4685]: I1202 10:46:17.146252 4685 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" event={"ID":"ab586931-b7c5-450d-831c-ca05dbb865ad","Type":"ContainerDied","Data":"8ddcccf015f18dad10680f314f84f4bf706ca1543869864693b0875cd40721be"} Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.604991 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.743243 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-0\") pod \"ab586931-b7c5-450d-831c-ca05dbb865ad\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.743348 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-0\") pod \"ab586931-b7c5-450d-831c-ca05dbb865ad\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.743401 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-extra-config-0\") pod \"ab586931-b7c5-450d-831c-ca05dbb865ad\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.743439 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5wgq\" (UniqueName: \"kubernetes.io/projected/ab586931-b7c5-450d-831c-ca05dbb865ad-kube-api-access-q5wgq\") pod \"ab586931-b7c5-450d-831c-ca05dbb865ad\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.743471 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-1\") pod \"ab586931-b7c5-450d-831c-ca05dbb865ad\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.743540 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-1\") pod \"ab586931-b7c5-450d-831c-ca05dbb865ad\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.744213 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-inventory\") pod \"ab586931-b7c5-450d-831c-ca05dbb865ad\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.744254 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-combined-ca-bundle\") pod \"ab586931-b7c5-450d-831c-ca05dbb865ad\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.744316 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-ssh-key\") pod \"ab586931-b7c5-450d-831c-ca05dbb865ad\" (UID: \"ab586931-b7c5-450d-831c-ca05dbb865ad\") " Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.749260 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab586931-b7c5-450d-831c-ca05dbb865ad-kube-api-access-q5wgq" (OuterVolumeSpecName: "kube-api-access-q5wgq") pod "ab586931-b7c5-450d-831c-ca05dbb865ad" (UID: "ab586931-b7c5-450d-831c-ca05dbb865ad"). InnerVolumeSpecName "kube-api-access-q5wgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.769519 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "ab586931-b7c5-450d-831c-ca05dbb865ad" (UID: "ab586931-b7c5-450d-831c-ca05dbb865ad"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.775023 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "ab586931-b7c5-450d-831c-ca05dbb865ad" (UID: "ab586931-b7c5-450d-831c-ca05dbb865ad"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.783262 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "ab586931-b7c5-450d-831c-ca05dbb865ad" (UID: "ab586931-b7c5-450d-831c-ca05dbb865ad"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.784736 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "ab586931-b7c5-450d-831c-ca05dbb865ad" (UID: "ab586931-b7c5-450d-831c-ca05dbb865ad"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.797588 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-inventory" (OuterVolumeSpecName: "inventory") pod "ab586931-b7c5-450d-831c-ca05dbb865ad" (UID: "ab586931-b7c5-450d-831c-ca05dbb865ad"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.800673 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ab586931-b7c5-450d-831c-ca05dbb865ad" (UID: "ab586931-b7c5-450d-831c-ca05dbb865ad"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.804318 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "ab586931-b7c5-450d-831c-ca05dbb865ad" (UID: "ab586931-b7c5-450d-831c-ca05dbb865ad"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.809762 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "ab586931-b7c5-450d-831c-ca05dbb865ad" (UID: "ab586931-b7c5-450d-831c-ca05dbb865ad"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.864318 4685 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.864356 4685 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.864365 4685 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.864385 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5wgq\" (UniqueName: \"kubernetes.io/projected/ab586931-b7c5-450d-831c-ca05dbb865ad-kube-api-access-q5wgq\") on node \"crc\" DevicePath \"\"" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.864394 4685 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.864403 4685 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.864413 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.864426 4685 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:46:18 crc kubenswrapper[4685]: I1202 10:46:18.864434 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab586931-b7c5-450d-831c-ca05dbb865ad-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.181883 4685 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" event={"ID":"ab586931-b7c5-450d-831c-ca05dbb865ad","Type":"ContainerDied","Data":"f753d26150caf4e7e550d4df8ba32768f4affa36b4922e5a5d25e944d733dac0"} Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.181927 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f753d26150caf4e7e550d4df8ba32768f4affa36b4922e5a5d25e944d733dac0" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.182025 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t8bqh" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.481675 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr"] Dec 02 10:46:19 crc kubenswrapper[4685]: E1202 10:46:19.482303 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab586931-b7c5-450d-831c-ca05dbb865ad" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.482372 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab586931-b7c5-450d-831c-ca05dbb865ad" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 02 10:46:19 crc kubenswrapper[4685]: E1202 10:46:19.482444 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb13d1a2-e090-4d30-9d75-9834ab612b3f" containerName="collect-profiles" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.482498 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb13d1a2-e090-4d30-9d75-9834ab612b3f" containerName="collect-profiles" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.482757 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb13d1a2-e090-4d30-9d75-9834ab612b3f" containerName="collect-profiles" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.482841 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab586931-b7c5-450d-831c-ca05dbb865ad" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.483627 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.486635 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.486798 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.487067 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-wgxf8" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.487369 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.487780 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.492515 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr"] Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.608994 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.609053 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.609154 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.609204 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jrrq\" (UniqueName: \"kubernetes.io/projected/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-kube-api-access-6jrrq\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.609251 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 
10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.609351 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.609406 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.710796 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.710855 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.710893 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.710929 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jrrq\" (UniqueName: \"kubernetes.io/projected/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-kube-api-access-6jrrq\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.710967 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.711045 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.711088 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.717156 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.717387 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.718048 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.723426 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.724908 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.725384 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.729801 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jrrq\" (UniqueName: \"kubernetes.io/projected/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-kube-api-access-6jrrq\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") 
" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:19 crc kubenswrapper[4685]: I1202 10:46:19.798211 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:46:20 crc kubenswrapper[4685]: I1202 10:46:20.346724 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr"] Dec 02 10:46:21 crc kubenswrapper[4685]: I1202 10:46:21.207599 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" event={"ID":"cefd0695-3aac-4aa9-b035-f6e6d22d79f7","Type":"ContainerStarted","Data":"827386130bce597cc4ea54cdd3882f8e98e500ebe7eaaa75334ea545c494cc12"} Dec 02 10:46:21 crc kubenswrapper[4685]: I1202 10:46:21.210646 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" event={"ID":"cefd0695-3aac-4aa9-b035-f6e6d22d79f7","Type":"ContainerStarted","Data":"6640f3a1d126c886f072b09f86424009329f5a39f46509ae885d4d2ca5ed94e5"} Dec 02 10:46:21 crc kubenswrapper[4685]: I1202 10:46:21.251514 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" podStartSLOduration=1.746382316 podStartE2EDuration="2.251489118s" podCreationTimestamp="2025-12-02 10:46:19 +0000 UTC" firstStartedPulling="2025-12-02 10:46:20.35030121 +0000 UTC m=+2672.722075364" lastFinishedPulling="2025-12-02 10:46:20.855408012 +0000 UTC m=+2673.227182166" observedRunningTime="2025-12-02 10:46:21.248894179 +0000 UTC m=+2673.620668333" watchObservedRunningTime="2025-12-02 10:46:21.251489118 +0000 UTC m=+2673.623263272" Dec 02 10:46:42 crc kubenswrapper[4685]: I1202 10:46:42.147842 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:46:42 crc kubenswrapper[4685]: I1202 10:46:42.148444 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.604587 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f6vtc"] Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.610004 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.616131 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6vtc"] Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.701675 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01a29fff-fc06-40a0-bf72-a2290597aeaa-utilities\") pod \"community-operators-f6vtc\" (UID: \"01a29fff-fc06-40a0-bf72-a2290597aeaa\") " pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.702061 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mdb6\" (UniqueName: \"kubernetes.io/projected/01a29fff-fc06-40a0-bf72-a2290597aeaa-kube-api-access-7mdb6\") pod \"community-operators-f6vtc\" (UID: \"01a29fff-fc06-40a0-bf72-a2290597aeaa\") " pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.702198 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01a29fff-fc06-40a0-bf72-a2290597aeaa-catalog-content\") pod \"community-operators-f6vtc\" (UID: \"01a29fff-fc06-40a0-bf72-a2290597aeaa\") " pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.803606 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mdb6\" (UniqueName: \"kubernetes.io/projected/01a29fff-fc06-40a0-bf72-a2290597aeaa-kube-api-access-7mdb6\") pod \"community-operators-f6vtc\" (UID: \"01a29fff-fc06-40a0-bf72-a2290597aeaa\") " pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.803685 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01a29fff-fc06-40a0-bf72-a2290597aeaa-catalog-content\") pod \"community-operators-f6vtc\" (UID: \"01a29fff-fc06-40a0-bf72-a2290597aeaa\") " pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.803716 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01a29fff-fc06-40a0-bf72-a2290597aeaa-utilities\") pod \"community-operators-f6vtc\" (UID: \"01a29fff-fc06-40a0-bf72-a2290597aeaa\") " pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.804189 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01a29fff-fc06-40a0-bf72-a2290597aeaa-utilities\") pod \"community-operators-f6vtc\" (UID: \"01a29fff-fc06-40a0-bf72-a2290597aeaa\") " pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.804554 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01a29fff-fc06-40a0-bf72-a2290597aeaa-catalog-content\") pod \"community-operators-f6vtc\" (UID: \"01a29fff-fc06-40a0-bf72-a2290597aeaa\") " pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.835171 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7mdb6\" (UniqueName: \"kubernetes.io/projected/01a29fff-fc06-40a0-bf72-a2290597aeaa-kube-api-access-7mdb6\") pod \"community-operators-f6vtc\" (UID: \"01a29fff-fc06-40a0-bf72-a2290597aeaa\") " pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:51 crc kubenswrapper[4685]: I1202 10:46:51.933610 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:46:52 crc kubenswrapper[4685]: I1202 10:46:52.474640 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6vtc"] Dec 02 10:46:53 crc kubenswrapper[4685]: I1202 10:46:53.497459 4685 generic.go:334] "Generic (PLEG): container finished" podID="01a29fff-fc06-40a0-bf72-a2290597aeaa" containerID="06c1f4be300b7e0923874e21fe660931eef0c4dea0a57cf756c1ce5684b0db55" exitCode=0 Dec 02 10:46:53 crc kubenswrapper[4685]: I1202 10:46:53.497509 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vtc" event={"ID":"01a29fff-fc06-40a0-bf72-a2290597aeaa","Type":"ContainerDied","Data":"06c1f4be300b7e0923874e21fe660931eef0c4dea0a57cf756c1ce5684b0db55"} Dec 02 10:46:53 crc kubenswrapper[4685]: I1202 10:46:53.497731 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vtc" event={"ID":"01a29fff-fc06-40a0-bf72-a2290597aeaa","Type":"ContainerStarted","Data":"d1ed543d62b16b13aa39edc33aadf81a1b0e7ddad504201d5d685e76ee9753a1"} Dec 02 10:46:58 crc kubenswrapper[4685]: I1202 10:46:58.553219 4685 generic.go:334] "Generic (PLEG): container finished" podID="01a29fff-fc06-40a0-bf72-a2290597aeaa" containerID="1e4b0b3d7bcc5adbff88b6a2db369cc5df771881df487ec7296648a9eff41d5a" exitCode=0 Dec 02 10:46:58 crc kubenswrapper[4685]: I1202 10:46:58.553276 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vtc" event={"ID":"01a29fff-fc06-40a0-bf72-a2290597aeaa","Type":"ContainerDied","Data":"1e4b0b3d7bcc5adbff88b6a2db369cc5df771881df487ec7296648a9eff41d5a"} Dec 02 10:46:59 crc kubenswrapper[4685]: I1202 10:46:59.563180 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vtc" event={"ID":"01a29fff-fc06-40a0-bf72-a2290597aeaa","Type":"ContainerStarted","Data":"164f545708cdd53cb2aef36993bc49fe23c44ab4ef78cf55b19280d841762bf5"} Dec 02 10:46:59 crc kubenswrapper[4685]: I1202 10:46:59.587595 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f6vtc" podStartSLOduration=3.07485604 podStartE2EDuration="8.587548764s" podCreationTimestamp="2025-12-02 10:46:51 +0000 UTC" firstStartedPulling="2025-12-02 10:46:53.499151972 +0000 UTC m=+2705.870926126" lastFinishedPulling="2025-12-02 10:46:59.011844696 +0000 UTC m=+2711.383618850" observedRunningTime="2025-12-02 10:46:59.581633062 +0000 UTC m=+2711.953407226" watchObservedRunningTime="2025-12-02 10:46:59.587548764 +0000 UTC m=+2711.959322918" Dec 02 10:47:01 crc kubenswrapper[4685]: I1202 10:47:01.934115 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:47:01 crc kubenswrapper[4685]: I1202 10:47:01.935178 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:47:01 crc kubenswrapper[4685]: I1202 10:47:01.987397 4685 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:47:11 crc kubenswrapper[4685]: I1202 10:47:11.985981 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f6vtc" Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.059963 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6vtc"] Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.126656 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jkc9w"] Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.126957 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jkc9w" podUID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerName="registry-server" containerID="cri-o://960e56704d1ab02966cf512a6266c0cbc03a297a55984c3dcf5647083cac26b7" gracePeriod=2 Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.147640 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.147696 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.147738 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.148657 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9255d03204e27cdd2e8bd721f4f8830da2fb1f7433065a8726360aa6e3a277d2"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.148713 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://9255d03204e27cdd2e8bd721f4f8830da2fb1f7433065a8726360aa6e3a277d2" gracePeriod=600 Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.692787 4685 generic.go:334] "Generic (PLEG): container finished" podID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerID="960e56704d1ab02966cf512a6266c0cbc03a297a55984c3dcf5647083cac26b7" exitCode=0 Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.692881 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkc9w" event={"ID":"1f9b0e49-5d75-4e57-a136-4723a4a12c65","Type":"ContainerDied","Data":"960e56704d1ab02966cf512a6266c0cbc03a297a55984c3dcf5647083cac26b7"} Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.696170 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" 
containerID="9255d03204e27cdd2e8bd721f4f8830da2fb1f7433065a8726360aa6e3a277d2" exitCode=0 Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.696238 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"9255d03204e27cdd2e8bd721f4f8830da2fb1f7433065a8726360aa6e3a277d2"} Dec 02 10:47:12 crc kubenswrapper[4685]: I1202 10:47:12.696295 4685 scope.go:117] "RemoveContainer" containerID="0e4bb50f3a66f0f1ccf97e7d20faeebc0bc83dc16871a5c9f537956b3ad8ee60" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.002879 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.026384 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-catalog-content\") pod \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.026451 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmnr6\" (UniqueName: \"kubernetes.io/projected/1f9b0e49-5d75-4e57-a136-4723a4a12c65-kube-api-access-zmnr6\") pod \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.026634 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-utilities\") pod \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\" (UID: \"1f9b0e49-5d75-4e57-a136-4723a4a12c65\") " Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.046600 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-utilities" (OuterVolumeSpecName: "utilities") pod "1f9b0e49-5d75-4e57-a136-4723a4a12c65" (UID: "1f9b0e49-5d75-4e57-a136-4723a4a12c65"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.077553 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f9b0e49-5d75-4e57-a136-4723a4a12c65-kube-api-access-zmnr6" (OuterVolumeSpecName: "kube-api-access-zmnr6") pod "1f9b0e49-5d75-4e57-a136-4723a4a12c65" (UID: "1f9b0e49-5d75-4e57-a136-4723a4a12c65"). InnerVolumeSpecName "kube-api-access-zmnr6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.128379 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.128411 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmnr6\" (UniqueName: \"kubernetes.io/projected/1f9b0e49-5d75-4e57-a136-4723a4a12c65-kube-api-access-zmnr6\") on node \"crc\" DevicePath \"\"" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.144937 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f9b0e49-5d75-4e57-a136-4723a4a12c65" (UID: "1f9b0e49-5d75-4e57-a136-4723a4a12c65"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.229789 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f9b0e49-5d75-4e57-a136-4723a4a12c65-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.714112 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990"} Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.716034 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jkc9w" event={"ID":"1f9b0e49-5d75-4e57-a136-4723a4a12c65","Type":"ContainerDied","Data":"3537e4af5d48985e2e826c24b2796444c1e6baa55d876133abb41b8bd2198c46"} Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.716069 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jkc9w" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.716092 4685 scope.go:117] "RemoveContainer" containerID="960e56704d1ab02966cf512a6266c0cbc03a297a55984c3dcf5647083cac26b7" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.791391 4685 scope.go:117] "RemoveContainer" containerID="ece16396d36387bb05835f72d6e7f940f8e9e8da1113b6616d81997233d4b593" Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.791415 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jkc9w"] Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.803523 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jkc9w"] Dec 02 10:47:14 crc kubenswrapper[4685]: I1202 10:47:14.826746 4685 scope.go:117] "RemoveContainer" containerID="5bad5ce49a62f4a67bd8fa88ee176a23c04d6c2be891a9f6586b46a4301c3e13" Dec 02 10:47:15 crc kubenswrapper[4685]: I1202 10:47:15.914366 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" path="/var/lib/kubelet/pods/1f9b0e49-5d75-4e57-a136-4723a4a12c65/volumes" Dec 02 10:49:18 crc kubenswrapper[4685]: I1202 10:49:18.869959 4685 generic.go:334] "Generic (PLEG): container finished" podID="cefd0695-3aac-4aa9-b035-f6e6d22d79f7" containerID="827386130bce597cc4ea54cdd3882f8e98e500ebe7eaaa75334ea545c494cc12" exitCode=0 Dec 02 10:49:18 crc kubenswrapper[4685]: I1202 10:49:18.870015 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" event={"ID":"cefd0695-3aac-4aa9-b035-f6e6d22d79f7","Type":"ContainerDied","Data":"827386130bce597cc4ea54cdd3882f8e98e500ebe7eaaa75334ea545c494cc12"} Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.282373 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.429892 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jrrq\" (UniqueName: \"kubernetes.io/projected/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-kube-api-access-6jrrq\") pod \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.430066 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-0\") pod \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.430142 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ssh-key\") pod \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.430214 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-inventory\") pod \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.430382 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-telemetry-combined-ca-bundle\") pod \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.430437 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-1\") pod \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.430482 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-2\") pod \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\" (UID: \"cefd0695-3aac-4aa9-b035-f6e6d22d79f7\") " Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.440836 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "cefd0695-3aac-4aa9-b035-f6e6d22d79f7" (UID: "cefd0695-3aac-4aa9-b035-f6e6d22d79f7"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.440818 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-kube-api-access-6jrrq" (OuterVolumeSpecName: "kube-api-access-6jrrq") pod "cefd0695-3aac-4aa9-b035-f6e6d22d79f7" (UID: "cefd0695-3aac-4aa9-b035-f6e6d22d79f7"). 
InnerVolumeSpecName "kube-api-access-6jrrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.459694 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "cefd0695-3aac-4aa9-b035-f6e6d22d79f7" (UID: "cefd0695-3aac-4aa9-b035-f6e6d22d79f7"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.463785 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cefd0695-3aac-4aa9-b035-f6e6d22d79f7" (UID: "cefd0695-3aac-4aa9-b035-f6e6d22d79f7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.464614 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-inventory" (OuterVolumeSpecName: "inventory") pod "cefd0695-3aac-4aa9-b035-f6e6d22d79f7" (UID: "cefd0695-3aac-4aa9-b035-f6e6d22d79f7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.472336 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "cefd0695-3aac-4aa9-b035-f6e6d22d79f7" (UID: "cefd0695-3aac-4aa9-b035-f6e6d22d79f7"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.480107 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "cefd0695-3aac-4aa9-b035-f6e6d22d79f7" (UID: "cefd0695-3aac-4aa9-b035-f6e6d22d79f7"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.532976 4685 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.533018 4685 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.533032 4685 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.533044 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jrrq\" (UniqueName: \"kubernetes.io/projected/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-kube-api-access-6jrrq\") on node \"crc\" DevicePath \"\"" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.533056 4685 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.533067 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.533077 4685 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cefd0695-3aac-4aa9-b035-f6e6d22d79f7-inventory\") on node \"crc\" DevicePath \"\"" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.896475 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" event={"ID":"cefd0695-3aac-4aa9-b035-f6e6d22d79f7","Type":"ContainerDied","Data":"6640f3a1d126c886f072b09f86424009329f5a39f46509ae885d4d2ca5ed94e5"} Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.896580 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6640f3a1d126c886f072b09f86424009329f5a39f46509ae885d4d2ca5ed94e5" Dec 02 10:49:20 crc kubenswrapper[4685]: I1202 10:49:20.896693 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr" Dec 02 10:49:42 crc kubenswrapper[4685]: I1202 10:49:42.147681 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:49:42 crc kubenswrapper[4685]: I1202 10:49:42.148210 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:50:12 crc kubenswrapper[4685]: I1202 10:50:12.148002 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:50:12 crc kubenswrapper[4685]: I1202 10:50:12.148643 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.070737 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 10:50:22 crc kubenswrapper[4685]: E1202 10:50:22.075188 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cefd0695-3aac-4aa9-b035-f6e6d22d79f7" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.075237 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="cefd0695-3aac-4aa9-b035-f6e6d22d79f7" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 10:50:22 crc kubenswrapper[4685]: E1202 10:50:22.075260 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerName="extract-content" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.075274 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerName="extract-content" Dec 02 10:50:22 crc kubenswrapper[4685]: E1202 10:50:22.075327 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerName="extract-utilities" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.075341 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerName="extract-utilities" Dec 02 10:50:22 crc kubenswrapper[4685]: E1202 10:50:22.075380 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerName="registry-server" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.075393 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerName="registry-server" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.075771 4685 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="cefd0695-3aac-4aa9-b035-f6e6d22d79f7" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.075804 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f9b0e49-5d75-4e57-a136-4723a4a12c65" containerName="registry-server" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.076903 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.080229 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.080530 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.081001 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gw4tw" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.082243 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.084424 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.191286 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.191352 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.191537 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.191790 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.191810 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9q7kj\" (UniqueName: \"kubernetes.io/projected/27023d6a-a566-4be7-9244-915ce922dd05-kube-api-access-9q7kj\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.191880 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.191925 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-config-data\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.191953 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.192092 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.293294 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.293476 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.294448 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.294657 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.294833 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.294861 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9q7kj\" (UniqueName: 
\"kubernetes.io/projected/27023d6a-a566-4be7-9244-915ce922dd05-kube-api-access-9q7kj\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.294906 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.294929 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.294954 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-config-data\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.295033 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.295766 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.295900 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.296146 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-config-data\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.296317 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.305981 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: 
\"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.306417 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.307714 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.323458 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9q7kj\" (UniqueName: \"kubernetes.io/projected/27023d6a-a566-4be7-9244-915ce922dd05-kube-api-access-9q7kj\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.338041 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"tempest-tests-tempest\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.396827 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.869866 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 02 10:50:22 crc kubenswrapper[4685]: W1202 10:50:22.875707 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27023d6a_a566_4be7_9244_915ce922dd05.slice/crio-2934de3be04f4d90e217a890e3b14fb478a57b9967b0fc45286e7d4cf656597e WatchSource:0}: Error finding container 2934de3be04f4d90e217a890e3b14fb478a57b9967b0fc45286e7d4cf656597e: Status 404 returned error can't find the container with id 2934de3be04f4d90e217a890e3b14fb478a57b9967b0fc45286e7d4cf656597e Dec 02 10:50:22 crc kubenswrapper[4685]: I1202 10:50:22.879840 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 10:50:23 crc kubenswrapper[4685]: I1202 10:50:23.566219 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"27023d6a-a566-4be7-9244-915ce922dd05","Type":"ContainerStarted","Data":"2934de3be04f4d90e217a890e3b14fb478a57b9967b0fc45286e7d4cf656597e"} Dec 02 10:50:42 crc kubenswrapper[4685]: I1202 10:50:42.147865 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:50:42 crc kubenswrapper[4685]: I1202 10:50:42.148379 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:50:42 crc kubenswrapper[4685]: I1202 10:50:42.148418 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:50:42 crc kubenswrapper[4685]: I1202 10:50:42.149261 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:50:42 crc kubenswrapper[4685]: I1202 10:50:42.149317 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" gracePeriod=600 Dec 02 10:50:42 crc kubenswrapper[4685]: I1202 10:50:42.759819 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" exitCode=0 Dec 02 10:50:42 crc kubenswrapper[4685]: I1202 10:50:42.760009 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990"} Dec 02 10:50:42 crc kubenswrapper[4685]: I1202 10:50:42.760180 4685 scope.go:117] "RemoveContainer" containerID="9255d03204e27cdd2e8bd721f4f8830da2fb1f7433065a8726360aa6e3a277d2" Dec 02 10:51:01 crc kubenswrapper[4685]: E1202 10:51:01.418611 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:51:01 crc kubenswrapper[4685]: E1202 10:51:01.499861 4685 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 02 10:51:01 crc kubenswrapper[4685]: E1202 10:51:01.502049 4685 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9q7kj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(27023d6a-a566-4be7-9244-915ce922dd05): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 02 10:51:01 crc kubenswrapper[4685]: E1202 10:51:01.503376 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="27023d6a-a566-4be7-9244-915ce922dd05" Dec 02 10:51:01 crc kubenswrapper[4685]: I1202 10:51:01.990735 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:51:01 crc kubenswrapper[4685]: E1202 10:51:01.991151 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:51:01 crc kubenswrapper[4685]: E1202 10:51:01.993039 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="27023d6a-a566-4be7-9244-915ce922dd05" Dec 02 10:51:13 crc kubenswrapper[4685]: I1202 10:51:13.901090 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:51:13 crc kubenswrapper[4685]: E1202 10:51:13.902297 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:51:17 crc kubenswrapper[4685]: I1202 10:51:17.782315 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 02 10:51:19 crc kubenswrapper[4685]: I1202 10:51:19.154960 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"27023d6a-a566-4be7-9244-915ce922dd05","Type":"ContainerStarted","Data":"eeec88ef0467ebc6dfc2d4de792239b3268ae4ab38304fa23a6d4a6f51ba4153"} Dec 02 10:51:19 crc kubenswrapper[4685]: I1202 10:51:19.179100 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.279869802 podStartE2EDuration="58.179072905s" podCreationTimestamp="2025-12-02 10:50:21 +0000 UTC" firstStartedPulling="2025-12-02 10:50:22.879551794 +0000 UTC m=+2915.251325958" lastFinishedPulling="2025-12-02 10:51:17.778754897 +0000 UTC m=+2970.150529061" observedRunningTime="2025-12-02 10:51:19.174633893 +0000 UTC m=+2971.546408057" watchObservedRunningTime="2025-12-02 10:51:19.179072905 +0000 UTC m=+2971.550847089" Dec 02 10:51:27 crc kubenswrapper[4685]: I1202 10:51:27.908277 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:51:27 crc kubenswrapper[4685]: E1202 10:51:27.909164 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" 
podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:51:42 crc kubenswrapper[4685]: I1202 10:51:42.899877 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:51:42 crc kubenswrapper[4685]: E1202 10:51:42.900591 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:51:56 crc kubenswrapper[4685]: I1202 10:51:56.900508 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:51:56 crc kubenswrapper[4685]: E1202 10:51:56.901343 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:52:10 crc kubenswrapper[4685]: I1202 10:52:10.900443 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:52:10 crc kubenswrapper[4685]: E1202 10:52:10.901060 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:52:25 crc kubenswrapper[4685]: I1202 10:52:25.899950 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:52:25 crc kubenswrapper[4685]: E1202 10:52:25.900726 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:52:38 crc kubenswrapper[4685]: I1202 10:52:38.901785 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:52:38 crc kubenswrapper[4685]: E1202 10:52:38.903650 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:52:45 crc kubenswrapper[4685]: I1202 10:52:45.996993 4685 generic.go:334] "Generic (PLEG): container finished" 
podID="27023d6a-a566-4be7-9244-915ce922dd05" containerID="eeec88ef0467ebc6dfc2d4de792239b3268ae4ab38304fa23a6d4a6f51ba4153" exitCode=0 Dec 02 10:52:45 crc kubenswrapper[4685]: I1202 10:52:45.997115 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"27023d6a-a566-4be7-9244-915ce922dd05","Type":"ContainerDied","Data":"eeec88ef0467ebc6dfc2d4de792239b3268ae4ab38304fa23a6d4a6f51ba4153"} Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.656663 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.805767 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-config-data\") pod \"27023d6a-a566-4be7-9244-915ce922dd05\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.805925 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config\") pod \"27023d6a-a566-4be7-9244-915ce922dd05\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.805969 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ssh-key\") pod \"27023d6a-a566-4be7-9244-915ce922dd05\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.805988 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9q7kj\" (UniqueName: \"kubernetes.io/projected/27023d6a-a566-4be7-9244-915ce922dd05-kube-api-access-9q7kj\") pod \"27023d6a-a566-4be7-9244-915ce922dd05\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.806010 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ca-certs\") pod \"27023d6a-a566-4be7-9244-915ce922dd05\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.806063 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"27023d6a-a566-4be7-9244-915ce922dd05\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.806152 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-temporary\") pod \"27023d6a-a566-4be7-9244-915ce922dd05\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.806194 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config-secret\") pod \"27023d6a-a566-4be7-9244-915ce922dd05\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.806228 4685 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-workdir\") pod \"27023d6a-a566-4be7-9244-915ce922dd05\" (UID: \"27023d6a-a566-4be7-9244-915ce922dd05\") " Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.807205 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-config-data" (OuterVolumeSpecName: "config-data") pod "27023d6a-a566-4be7-9244-915ce922dd05" (UID: "27023d6a-a566-4be7-9244-915ce922dd05"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.808150 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "27023d6a-a566-4be7-9244-915ce922dd05" (UID: "27023d6a-a566-4be7-9244-915ce922dd05"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.824203 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "27023d6a-a566-4be7-9244-915ce922dd05" (UID: "27023d6a-a566-4be7-9244-915ce922dd05"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.845754 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27023d6a-a566-4be7-9244-915ce922dd05-kube-api-access-9q7kj" (OuterVolumeSpecName: "kube-api-access-9q7kj") pod "27023d6a-a566-4be7-9244-915ce922dd05" (UID: "27023d6a-a566-4be7-9244-915ce922dd05"). InnerVolumeSpecName "kube-api-access-9q7kj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.848572 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "test-operator-logs") pod "27023d6a-a566-4be7-9244-915ce922dd05" (UID: "27023d6a-a566-4be7-9244-915ce922dd05"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.849689 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "27023d6a-a566-4be7-9244-915ce922dd05" (UID: "27023d6a-a566-4be7-9244-915ce922dd05"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.850237 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "27023d6a-a566-4be7-9244-915ce922dd05" (UID: "27023d6a-a566-4be7-9244-915ce922dd05"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.860210 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "27023d6a-a566-4be7-9244-915ce922dd05" (UID: "27023d6a-a566-4be7-9244-915ce922dd05"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.913763 4685 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.914008 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9q7kj\" (UniqueName: \"kubernetes.io/projected/27023d6a-a566-4be7-9244-915ce922dd05-kube-api-access-9q7kj\") on node \"crc\" DevicePath \"\"" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.914019 4685 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.914044 4685 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.914087 4685 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.914097 4685 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.914109 4685 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/27023d6a-a566-4be7-9244-915ce922dd05-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.914121 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.921209 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "27023d6a-a566-4be7-9244-915ce922dd05" (UID: "27023d6a-a566-4be7-9244-915ce922dd05"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 10:52:47 crc kubenswrapper[4685]: I1202 10:52:47.953637 4685 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 02 10:52:48 crc kubenswrapper[4685]: I1202 10:52:48.014986 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"27023d6a-a566-4be7-9244-915ce922dd05","Type":"ContainerDied","Data":"2934de3be04f4d90e217a890e3b14fb478a57b9967b0fc45286e7d4cf656597e"} Dec 02 10:52:48 crc kubenswrapper[4685]: I1202 10:52:48.015282 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2934de3be04f4d90e217a890e3b14fb478a57b9967b0fc45286e7d4cf656597e" Dec 02 10:52:48 crc kubenswrapper[4685]: I1202 10:52:48.015593 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 02 10:52:48 crc kubenswrapper[4685]: I1202 10:52:48.019081 4685 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 02 10:52:48 crc kubenswrapper[4685]: I1202 10:52:48.019232 4685 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/27023d6a-a566-4be7-9244-915ce922dd05-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 02 10:52:49 crc kubenswrapper[4685]: I1202 10:52:49.900260 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:52:49 crc kubenswrapper[4685]: E1202 10:52:49.900689 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.496997 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 02 10:52:54 crc kubenswrapper[4685]: E1202 10:52:54.498090 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27023d6a-a566-4be7-9244-915ce922dd05" containerName="tempest-tests-tempest-tests-runner" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.498111 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="27023d6a-a566-4be7-9244-915ce922dd05" containerName="tempest-tests-tempest-tests-runner" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.498411 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="27023d6a-a566-4be7-9244-915ce922dd05" containerName="tempest-tests-tempest-tests-runner" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.499704 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.502468 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gw4tw" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.508783 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.648103 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"0063a6a8-b535-4747-b1bd-8680aea722af\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.648264 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrxpr\" (UniqueName: \"kubernetes.io/projected/0063a6a8-b535-4747-b1bd-8680aea722af-kube-api-access-zrxpr\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"0063a6a8-b535-4747-b1bd-8680aea722af\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.750499 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrxpr\" (UniqueName: \"kubernetes.io/projected/0063a6a8-b535-4747-b1bd-8680aea722af-kube-api-access-zrxpr\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"0063a6a8-b535-4747-b1bd-8680aea722af\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.750693 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"0063a6a8-b535-4747-b1bd-8680aea722af\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.751214 4685 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"0063a6a8-b535-4747-b1bd-8680aea722af\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.788332 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrxpr\" (UniqueName: \"kubernetes.io/projected/0063a6a8-b535-4747-b1bd-8680aea722af-kube-api-access-zrxpr\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"0063a6a8-b535-4747-b1bd-8680aea722af\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 10:52:54 crc kubenswrapper[4685]: I1202 10:52:54.794408 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"0063a6a8-b535-4747-b1bd-8680aea722af\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 10:52:54 crc 
kubenswrapper[4685]: I1202 10:52:54.839020 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 02 10:52:55 crc kubenswrapper[4685]: I1202 10:52:55.326015 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 02 10:52:56 crc kubenswrapper[4685]: I1202 10:52:56.104121 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"0063a6a8-b535-4747-b1bd-8680aea722af","Type":"ContainerStarted","Data":"e051586fc5fb983bbf109727c3778bfa1506b82f1b0f4c05a907a6177416bc8d"} Dec 02 10:52:58 crc kubenswrapper[4685]: I1202 10:52:58.126593 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"0063a6a8-b535-4747-b1bd-8680aea722af","Type":"ContainerStarted","Data":"99e8bbceca6f88f882436768cb59fcce7cef18222c5c71338cab4ecc351d52b1"} Dec 02 10:52:58 crc kubenswrapper[4685]: I1202 10:52:58.149385 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.382701851 podStartE2EDuration="4.149367546s" podCreationTimestamp="2025-12-02 10:52:54 +0000 UTC" firstStartedPulling="2025-12-02 10:52:55.343332777 +0000 UTC m=+3067.715106971" lastFinishedPulling="2025-12-02 10:52:57.109998472 +0000 UTC m=+3069.481772666" observedRunningTime="2025-12-02 10:52:58.145814309 +0000 UTC m=+3070.517588494" watchObservedRunningTime="2025-12-02 10:52:58.149367546 +0000 UTC m=+3070.521141690" Dec 02 10:53:03 crc kubenswrapper[4685]: I1202 10:53:03.899932 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:53:03 crc kubenswrapper[4685]: E1202 10:53:03.900997 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:53:15 crc kubenswrapper[4685]: I1202 10:53:15.901055 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:53:15 crc kubenswrapper[4685]: E1202 10:53:15.902121 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.516999 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wh8tx/must-gather-8tghj"] Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.519131 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wh8tx/must-gather-8tghj" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.524095 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wh8tx"/"openshift-service-ca.crt" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.524119 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wh8tx"/"default-dockercfg-t7n2d" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.528212 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wh8tx"/"kube-root-ca.crt" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.628513 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wh8tx/must-gather-8tghj"] Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.674201 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs7vs\" (UniqueName: \"kubernetes.io/projected/fdccfbcc-d473-4935-ad48-669a64b86c9f-kube-api-access-rs7vs\") pod \"must-gather-8tghj\" (UID: \"fdccfbcc-d473-4935-ad48-669a64b86c9f\") " pod="openshift-must-gather-wh8tx/must-gather-8tghj" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.674242 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/fdccfbcc-d473-4935-ad48-669a64b86c9f-must-gather-output\") pod \"must-gather-8tghj\" (UID: \"fdccfbcc-d473-4935-ad48-669a64b86c9f\") " pod="openshift-must-gather-wh8tx/must-gather-8tghj" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.776959 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs7vs\" (UniqueName: \"kubernetes.io/projected/fdccfbcc-d473-4935-ad48-669a64b86c9f-kube-api-access-rs7vs\") pod \"must-gather-8tghj\" (UID: \"fdccfbcc-d473-4935-ad48-669a64b86c9f\") " pod="openshift-must-gather-wh8tx/must-gather-8tghj" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.776997 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/fdccfbcc-d473-4935-ad48-669a64b86c9f-must-gather-output\") pod \"must-gather-8tghj\" (UID: \"fdccfbcc-d473-4935-ad48-669a64b86c9f\") " pod="openshift-must-gather-wh8tx/must-gather-8tghj" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.777376 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/fdccfbcc-d473-4935-ad48-669a64b86c9f-must-gather-output\") pod \"must-gather-8tghj\" (UID: \"fdccfbcc-d473-4935-ad48-669a64b86c9f\") " pod="openshift-must-gather-wh8tx/must-gather-8tghj" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.798649 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs7vs\" (UniqueName: \"kubernetes.io/projected/fdccfbcc-d473-4935-ad48-669a64b86c9f-kube-api-access-rs7vs\") pod \"must-gather-8tghj\" (UID: \"fdccfbcc-d473-4935-ad48-669a64b86c9f\") " pod="openshift-must-gather-wh8tx/must-gather-8tghj" Dec 02 10:53:20 crc kubenswrapper[4685]: I1202 10:53:20.839133 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wh8tx/must-gather-8tghj" Dec 02 10:53:21 crc kubenswrapper[4685]: I1202 10:53:21.333055 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wh8tx/must-gather-8tghj"] Dec 02 10:53:21 crc kubenswrapper[4685]: I1202 10:53:21.380822 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wh8tx/must-gather-8tghj" event={"ID":"fdccfbcc-d473-4935-ad48-669a64b86c9f","Type":"ContainerStarted","Data":"e78f5261573c059b2753055a2e858dccdcc39b4fc88358d846dfa1b43e757737"} Dec 02 10:53:26 crc kubenswrapper[4685]: I1202 10:53:26.426993 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wh8tx/must-gather-8tghj" event={"ID":"fdccfbcc-d473-4935-ad48-669a64b86c9f","Type":"ContainerStarted","Data":"88602fb6255545abbb669ad09a1234a4b4b7a572f47c56eea61fba04e751db4a"} Dec 02 10:53:26 crc kubenswrapper[4685]: I1202 10:53:26.427448 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wh8tx/must-gather-8tghj" event={"ID":"fdccfbcc-d473-4935-ad48-669a64b86c9f","Type":"ContainerStarted","Data":"1b8a675a11c49bb220540232d1c7addea1c8fdd8c53d3b0dce79658a8e85edb0"} Dec 02 10:53:26 crc kubenswrapper[4685]: I1202 10:53:26.899841 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:53:26 crc kubenswrapper[4685]: E1202 10:53:26.900329 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:53:29 crc kubenswrapper[4685]: I1202 10:53:29.863259 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wh8tx/must-gather-8tghj" podStartSLOduration=5.663178043 podStartE2EDuration="9.863239349s" podCreationTimestamp="2025-12-02 10:53:20 +0000 UTC" firstStartedPulling="2025-12-02 10:53:21.339967493 +0000 UTC m=+3093.711741647" lastFinishedPulling="2025-12-02 10:53:25.540028759 +0000 UTC m=+3097.911802953" observedRunningTime="2025-12-02 10:53:27.464728479 +0000 UTC m=+3099.836502703" watchObservedRunningTime="2025-12-02 10:53:29.863239349 +0000 UTC m=+3102.235013503" Dec 02 10:53:29 crc kubenswrapper[4685]: I1202 10:53:29.868631 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wh8tx/crc-debug-649zd"] Dec 02 10:53:29 crc kubenswrapper[4685]: I1202 10:53:29.870183 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wh8tx/crc-debug-649zd" Dec 02 10:53:29 crc kubenswrapper[4685]: I1202 10:53:29.959043 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-host\") pod \"crc-debug-649zd\" (UID: \"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b\") " pod="openshift-must-gather-wh8tx/crc-debug-649zd" Dec 02 10:53:29 crc kubenswrapper[4685]: I1202 10:53:29.959217 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxzc8\" (UniqueName: \"kubernetes.io/projected/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-kube-api-access-nxzc8\") pod \"crc-debug-649zd\" (UID: \"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b\") " pod="openshift-must-gather-wh8tx/crc-debug-649zd" Dec 02 10:53:30 crc kubenswrapper[4685]: I1202 10:53:30.061266 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxzc8\" (UniqueName: \"kubernetes.io/projected/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-kube-api-access-nxzc8\") pod \"crc-debug-649zd\" (UID: \"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b\") " pod="openshift-must-gather-wh8tx/crc-debug-649zd" Dec 02 10:53:30 crc kubenswrapper[4685]: I1202 10:53:30.061343 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-host\") pod \"crc-debug-649zd\" (UID: \"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b\") " pod="openshift-must-gather-wh8tx/crc-debug-649zd" Dec 02 10:53:30 crc kubenswrapper[4685]: I1202 10:53:30.061464 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-host\") pod \"crc-debug-649zd\" (UID: \"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b\") " pod="openshift-must-gather-wh8tx/crc-debug-649zd" Dec 02 10:53:30 crc kubenswrapper[4685]: I1202 10:53:30.093055 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxzc8\" (UniqueName: \"kubernetes.io/projected/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-kube-api-access-nxzc8\") pod \"crc-debug-649zd\" (UID: \"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b\") " pod="openshift-must-gather-wh8tx/crc-debug-649zd" Dec 02 10:53:30 crc kubenswrapper[4685]: I1202 10:53:30.186900 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wh8tx/crc-debug-649zd" Dec 02 10:53:30 crc kubenswrapper[4685]: I1202 10:53:30.466853 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wh8tx/crc-debug-649zd" event={"ID":"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b","Type":"ContainerStarted","Data":"4d53bbbccf357898027425185e236d5d4b88abf5a94e613896847c5c5a20ad41"} Dec 02 10:53:40 crc kubenswrapper[4685]: I1202 10:53:40.899842 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:53:40 crc kubenswrapper[4685]: E1202 10:53:40.900710 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:53:41 crc kubenswrapper[4685]: I1202 10:53:41.591542 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wh8tx/crc-debug-649zd" event={"ID":"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b","Type":"ContainerStarted","Data":"fbd67bac0e1018bf6351d4f784440741471501e294560166d9577567cfc2d873"} Dec 02 10:53:41 crc kubenswrapper[4685]: I1202 10:53:41.608502 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wh8tx/crc-debug-649zd" podStartSLOduration=1.719876921 podStartE2EDuration="12.608485611s" podCreationTimestamp="2025-12-02 10:53:29 +0000 UTC" firstStartedPulling="2025-12-02 10:53:30.238724123 +0000 UTC m=+3102.610498277" lastFinishedPulling="2025-12-02 10:53:41.127332813 +0000 UTC m=+3113.499106967" observedRunningTime="2025-12-02 10:53:41.606482496 +0000 UTC m=+3113.978256650" watchObservedRunningTime="2025-12-02 10:53:41.608485611 +0000 UTC m=+3113.980259765" Dec 02 10:53:51 crc kubenswrapper[4685]: I1202 10:53:51.900460 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:53:51 crc kubenswrapper[4685]: E1202 10:53:51.901927 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.152105 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6sq5r"] Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.155383 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.162429 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6sq5r"] Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.229732 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-utilities\") pod \"redhat-operators-6sq5r\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.229793 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-catalog-content\") pod \"redhat-operators-6sq5r\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.230098 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7nwp\" (UniqueName: \"kubernetes.io/projected/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-kube-api-access-b7nwp\") pod \"redhat-operators-6sq5r\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.331757 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-utilities\") pod \"redhat-operators-6sq5r\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.331807 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-catalog-content\") pod \"redhat-operators-6sq5r\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.331889 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7nwp\" (UniqueName: \"kubernetes.io/projected/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-kube-api-access-b7nwp\") pod \"redhat-operators-6sq5r\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.332538 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-utilities\") pod \"redhat-operators-6sq5r\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.332659 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-catalog-content\") pod \"redhat-operators-6sq5r\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.356489 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-b7nwp\" (UniqueName: \"kubernetes.io/projected/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-kube-api-access-b7nwp\") pod \"redhat-operators-6sq5r\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:55 crc kubenswrapper[4685]: I1202 10:53:55.483009 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:53:56 crc kubenswrapper[4685]: I1202 10:53:56.713666 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6sq5r"] Dec 02 10:53:56 crc kubenswrapper[4685]: I1202 10:53:56.714285 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wh8tx/crc-debug-649zd" event={"ID":"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b","Type":"ContainerDied","Data":"fbd67bac0e1018bf6351d4f784440741471501e294560166d9577567cfc2d873"} Dec 02 10:53:56 crc kubenswrapper[4685]: I1202 10:53:56.713781 4685 generic.go:334] "Generic (PLEG): container finished" podID="1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b" containerID="fbd67bac0e1018bf6351d4f784440741471501e294560166d9577567cfc2d873" exitCode=0 Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.724962 4685 generic.go:334] "Generic (PLEG): container finished" podID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerID="6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0" exitCode=0 Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.725027 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sq5r" event={"ID":"4edc8dd0-5012-4d21-b6de-61ddaf58faf0","Type":"ContainerDied","Data":"6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0"} Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.725262 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sq5r" event={"ID":"4edc8dd0-5012-4d21-b6de-61ddaf58faf0","Type":"ContainerStarted","Data":"f98eff5ea3d295e3f3a449653c4c0d0ce39be3a01fe6533729fbb407115f092f"} Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.824354 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wh8tx/crc-debug-649zd" Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.861017 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wh8tx/crc-debug-649zd"] Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.872187 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wh8tx/crc-debug-649zd"] Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.886901 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-host\") pod \"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b\" (UID: \"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b\") " Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.887041 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-host" (OuterVolumeSpecName: "host") pod "1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b" (UID: "1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.887103 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxzc8\" (UniqueName: \"kubernetes.io/projected/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-kube-api-access-nxzc8\") pod \"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b\" (UID: \"1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b\") " Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.887526 4685 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-host\") on node \"crc\" DevicePath \"\"" Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.900205 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-kube-api-access-nxzc8" (OuterVolumeSpecName: "kube-api-access-nxzc8") pod "1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b" (UID: "1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b"). InnerVolumeSpecName "kube-api-access-nxzc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.935916 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b" path="/var/lib/kubelet/pods/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b/volumes" Dec 02 10:53:57 crc kubenswrapper[4685]: I1202 10:53:57.988717 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxzc8\" (UniqueName: \"kubernetes.io/projected/1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b-kube-api-access-nxzc8\") on node \"crc\" DevicePath \"\"" Dec 02 10:53:58 crc kubenswrapper[4685]: I1202 10:53:58.735796 4685 scope.go:117] "RemoveContainer" containerID="fbd67bac0e1018bf6351d4f784440741471501e294560166d9577567cfc2d873" Dec 02 10:53:58 crc kubenswrapper[4685]: I1202 10:53:58.735870 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wh8tx/crc-debug-649zd" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.057622 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wh8tx/crc-debug-4b79s"] Dec 02 10:53:59 crc kubenswrapper[4685]: E1202 10:53:59.057989 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b" containerName="container-00" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.058004 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b" containerName="container-00" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.058205 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fde844f-4cfe-4b13-bb2d-9cbbed77ac5b" containerName="container-00" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.058774 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wh8tx/crc-debug-4b79s" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.114146 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4b78f7e1-a624-439f-af6a-0f28aebb23b3-host\") pod \"crc-debug-4b79s\" (UID: \"4b78f7e1-a624-439f-af6a-0f28aebb23b3\") " pod="openshift-must-gather-wh8tx/crc-debug-4b79s" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.114314 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rtnj\" (UniqueName: \"kubernetes.io/projected/4b78f7e1-a624-439f-af6a-0f28aebb23b3-kube-api-access-7rtnj\") pod \"crc-debug-4b79s\" (UID: \"4b78f7e1-a624-439f-af6a-0f28aebb23b3\") " pod="openshift-must-gather-wh8tx/crc-debug-4b79s" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.216271 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rtnj\" (UniqueName: \"kubernetes.io/projected/4b78f7e1-a624-439f-af6a-0f28aebb23b3-kube-api-access-7rtnj\") pod \"crc-debug-4b79s\" (UID: \"4b78f7e1-a624-439f-af6a-0f28aebb23b3\") " pod="openshift-must-gather-wh8tx/crc-debug-4b79s" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.216429 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4b78f7e1-a624-439f-af6a-0f28aebb23b3-host\") pod \"crc-debug-4b79s\" (UID: \"4b78f7e1-a624-439f-af6a-0f28aebb23b3\") " pod="openshift-must-gather-wh8tx/crc-debug-4b79s" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.216585 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4b78f7e1-a624-439f-af6a-0f28aebb23b3-host\") pod \"crc-debug-4b79s\" (UID: \"4b78f7e1-a624-439f-af6a-0f28aebb23b3\") " pod="openshift-must-gather-wh8tx/crc-debug-4b79s" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.240322 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rtnj\" (UniqueName: \"kubernetes.io/projected/4b78f7e1-a624-439f-af6a-0f28aebb23b3-kube-api-access-7rtnj\") pod \"crc-debug-4b79s\" (UID: \"4b78f7e1-a624-439f-af6a-0f28aebb23b3\") " pod="openshift-must-gather-wh8tx/crc-debug-4b79s" Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.373145 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wh8tx/crc-debug-4b79s" Dec 02 10:53:59 crc kubenswrapper[4685]: W1202 10:53:59.418363 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b78f7e1_a624_439f_af6a_0f28aebb23b3.slice/crio-1f201b76d18b54c2477a23812b4bcef6e2936f13bcc6ecc8c6dae6f997b43bc9 WatchSource:0}: Error finding container 1f201b76d18b54c2477a23812b4bcef6e2936f13bcc6ecc8c6dae6f997b43bc9: Status 404 returned error can't find the container with id 1f201b76d18b54c2477a23812b4bcef6e2936f13bcc6ecc8c6dae6f997b43bc9 Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.745361 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sq5r" event={"ID":"4edc8dd0-5012-4d21-b6de-61ddaf58faf0","Type":"ContainerStarted","Data":"75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59"} Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.749307 4685 generic.go:334] "Generic (PLEG): container finished" podID="4b78f7e1-a624-439f-af6a-0f28aebb23b3" containerID="1ab1521960fb3a086e9886ad52d672aa371c97f27fab51d43a9f459bca57ce54" exitCode=1 Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.749348 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wh8tx/crc-debug-4b79s" event={"ID":"4b78f7e1-a624-439f-af6a-0f28aebb23b3","Type":"ContainerDied","Data":"1ab1521960fb3a086e9886ad52d672aa371c97f27fab51d43a9f459bca57ce54"} Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.749369 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wh8tx/crc-debug-4b79s" event={"ID":"4b78f7e1-a624-439f-af6a-0f28aebb23b3","Type":"ContainerStarted","Data":"1f201b76d18b54c2477a23812b4bcef6e2936f13bcc6ecc8c6dae6f997b43bc9"} Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.816856 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wh8tx/crc-debug-4b79s"] Dec 02 10:53:59 crc kubenswrapper[4685]: I1202 10:53:59.829052 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wh8tx/crc-debug-4b79s"] Dec 02 10:54:00 crc kubenswrapper[4685]: I1202 10:54:00.879594 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wh8tx/crc-debug-4b79s" Dec 02 10:54:00 crc kubenswrapper[4685]: I1202 10:54:00.948489 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4b78f7e1-a624-439f-af6a-0f28aebb23b3-host\") pod \"4b78f7e1-a624-439f-af6a-0f28aebb23b3\" (UID: \"4b78f7e1-a624-439f-af6a-0f28aebb23b3\") " Dec 02 10:54:00 crc kubenswrapper[4685]: I1202 10:54:00.948620 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4b78f7e1-a624-439f-af6a-0f28aebb23b3-host" (OuterVolumeSpecName: "host") pod "4b78f7e1-a624-439f-af6a-0f28aebb23b3" (UID: "4b78f7e1-a624-439f-af6a-0f28aebb23b3"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 10:54:00 crc kubenswrapper[4685]: I1202 10:54:00.948628 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rtnj\" (UniqueName: \"kubernetes.io/projected/4b78f7e1-a624-439f-af6a-0f28aebb23b3-kube-api-access-7rtnj\") pod \"4b78f7e1-a624-439f-af6a-0f28aebb23b3\" (UID: \"4b78f7e1-a624-439f-af6a-0f28aebb23b3\") " Dec 02 10:54:00 crc kubenswrapper[4685]: I1202 10:54:00.950285 4685 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4b78f7e1-a624-439f-af6a-0f28aebb23b3-host\") on node \"crc\" DevicePath \"\"" Dec 02 10:54:00 crc kubenswrapper[4685]: I1202 10:54:00.976076 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b78f7e1-a624-439f-af6a-0f28aebb23b3-kube-api-access-7rtnj" (OuterVolumeSpecName: "kube-api-access-7rtnj") pod "4b78f7e1-a624-439f-af6a-0f28aebb23b3" (UID: "4b78f7e1-a624-439f-af6a-0f28aebb23b3"). InnerVolumeSpecName "kube-api-access-7rtnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:54:01 crc kubenswrapper[4685]: I1202 10:54:01.052472 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rtnj\" (UniqueName: \"kubernetes.io/projected/4b78f7e1-a624-439f-af6a-0f28aebb23b3-kube-api-access-7rtnj\") on node \"crc\" DevicePath \"\"" Dec 02 10:54:01 crc kubenswrapper[4685]: I1202 10:54:01.768390 4685 scope.go:117] "RemoveContainer" containerID="1ab1521960fb3a086e9886ad52d672aa371c97f27fab51d43a9f459bca57ce54" Dec 02 10:54:01 crc kubenswrapper[4685]: I1202 10:54:01.768409 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wh8tx/crc-debug-4b79s" Dec 02 10:54:01 crc kubenswrapper[4685]: I1202 10:54:01.909965 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b78f7e1-a624-439f-af6a-0f28aebb23b3" path="/var/lib/kubelet/pods/4b78f7e1-a624-439f-af6a-0f28aebb23b3/volumes" Dec 02 10:54:02 crc kubenswrapper[4685]: I1202 10:54:02.778742 4685 generic.go:334] "Generic (PLEG): container finished" podID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerID="75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59" exitCode=0 Dec 02 10:54:02 crc kubenswrapper[4685]: I1202 10:54:02.778801 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sq5r" event={"ID":"4edc8dd0-5012-4d21-b6de-61ddaf58faf0","Type":"ContainerDied","Data":"75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59"} Dec 02 10:54:04 crc kubenswrapper[4685]: I1202 10:54:04.800640 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sq5r" event={"ID":"4edc8dd0-5012-4d21-b6de-61ddaf58faf0","Type":"ContainerStarted","Data":"390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5"} Dec 02 10:54:04 crc kubenswrapper[4685]: I1202 10:54:04.824946 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6sq5r" podStartSLOduration=3.3970291980000002 podStartE2EDuration="9.824922147s" podCreationTimestamp="2025-12-02 10:53:55 +0000 UTC" firstStartedPulling="2025-12-02 10:53:57.728781221 +0000 UTC m=+3130.100555375" lastFinishedPulling="2025-12-02 10:54:04.15667416 +0000 UTC m=+3136.528448324" observedRunningTime="2025-12-02 10:54:04.8164962 +0000 UTC m=+3137.188270354" watchObservedRunningTime="2025-12-02 10:54:04.824922147 +0000 UTC 
m=+3137.196696311" Dec 02 10:54:05 crc kubenswrapper[4685]: I1202 10:54:05.483689 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:54:05 crc kubenswrapper[4685]: I1202 10:54:05.483885 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:54:06 crc kubenswrapper[4685]: I1202 10:54:06.529827 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6sq5r" podUID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerName="registry-server" probeResult="failure" output=< Dec 02 10:54:06 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Dec 02 10:54:06 crc kubenswrapper[4685]: > Dec 02 10:54:06 crc kubenswrapper[4685]: I1202 10:54:06.899744 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:54:06 crc kubenswrapper[4685]: E1202 10:54:06.900334 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:54:15 crc kubenswrapper[4685]: I1202 10:54:15.546941 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:54:15 crc kubenswrapper[4685]: I1202 10:54:15.623917 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:54:15 crc kubenswrapper[4685]: I1202 10:54:15.807882 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6sq5r"] Dec 02 10:54:16 crc kubenswrapper[4685]: I1202 10:54:16.932171 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6sq5r" podUID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerName="registry-server" containerID="cri-o://390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5" gracePeriod=2 Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.425779 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.453734 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-catalog-content\") pod \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.462922 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7nwp\" (UniqueName: \"kubernetes.io/projected/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-kube-api-access-b7nwp\") pod \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.463082 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-utilities\") pod \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\" (UID: \"4edc8dd0-5012-4d21-b6de-61ddaf58faf0\") " Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.466408 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-utilities" (OuterVolumeSpecName: "utilities") pod "4edc8dd0-5012-4d21-b6de-61ddaf58faf0" (UID: "4edc8dd0-5012-4d21-b6de-61ddaf58faf0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.482907 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-kube-api-access-b7nwp" (OuterVolumeSpecName: "kube-api-access-b7nwp") pod "4edc8dd0-5012-4d21-b6de-61ddaf58faf0" (UID: "4edc8dd0-5012-4d21-b6de-61ddaf58faf0"). InnerVolumeSpecName "kube-api-access-b7nwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.564925 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.564955 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7nwp\" (UniqueName: \"kubernetes.io/projected/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-kube-api-access-b7nwp\") on node \"crc\" DevicePath \"\"" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.586699 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4edc8dd0-5012-4d21-b6de-61ddaf58faf0" (UID: "4edc8dd0-5012-4d21-b6de-61ddaf58faf0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.666512 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4edc8dd0-5012-4d21-b6de-61ddaf58faf0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.941615 4685 generic.go:334] "Generic (PLEG): container finished" podID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerID="390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5" exitCode=0 Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.941669 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sq5r" event={"ID":"4edc8dd0-5012-4d21-b6de-61ddaf58faf0","Type":"ContainerDied","Data":"390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5"} Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.941693 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6sq5r" event={"ID":"4edc8dd0-5012-4d21-b6de-61ddaf58faf0","Type":"ContainerDied","Data":"f98eff5ea3d295e3f3a449653c4c0d0ce39be3a01fe6533729fbb407115f092f"} Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.941712 4685 scope.go:117] "RemoveContainer" containerID="390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.941830 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6sq5r" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.958841 4685 scope.go:117] "RemoveContainer" containerID="75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.964243 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6sq5r"] Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.981055 4685 scope.go:117] "RemoveContainer" containerID="6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0" Dec 02 10:54:17 crc kubenswrapper[4685]: I1202 10:54:17.986270 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6sq5r"] Dec 02 10:54:18 crc kubenswrapper[4685]: I1202 10:54:18.018621 4685 scope.go:117] "RemoveContainer" containerID="390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5" Dec 02 10:54:18 crc kubenswrapper[4685]: E1202 10:54:18.019093 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5\": container with ID starting with 390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5 not found: ID does not exist" containerID="390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5" Dec 02 10:54:18 crc kubenswrapper[4685]: I1202 10:54:18.019135 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5"} err="failed to get container status \"390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5\": rpc error: code = NotFound desc = could not find container \"390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5\": container with ID starting with 390607a5b8a0387a45648dc5120b3a3425c98bddc2b5ee27cf9293637dcc3dd5 not found: ID does not exist" Dec 02 10:54:18 crc 
kubenswrapper[4685]: I1202 10:54:18.019162 4685 scope.go:117] "RemoveContainer" containerID="75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59" Dec 02 10:54:18 crc kubenswrapper[4685]: E1202 10:54:18.019657 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59\": container with ID starting with 75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59 not found: ID does not exist" containerID="75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59" Dec 02 10:54:18 crc kubenswrapper[4685]: I1202 10:54:18.019714 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59"} err="failed to get container status \"75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59\": rpc error: code = NotFound desc = could not find container \"75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59\": container with ID starting with 75d0edc6c9b02b9a87c8de0ffdfa7cf0c1ed4f07565b394b4f7eff04ffde9a59 not found: ID does not exist" Dec 02 10:54:18 crc kubenswrapper[4685]: I1202 10:54:18.019742 4685 scope.go:117] "RemoveContainer" containerID="6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0" Dec 02 10:54:18 crc kubenswrapper[4685]: E1202 10:54:18.020013 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0\": container with ID starting with 6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0 not found: ID does not exist" containerID="6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0" Dec 02 10:54:18 crc kubenswrapper[4685]: I1202 10:54:18.020046 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0"} err="failed to get container status \"6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0\": rpc error: code = NotFound desc = could not find container \"6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0\": container with ID starting with 6d1cdc3f952a869007c17866183c820b004331ddfb24ba48a3a24a9a2ac7fde0 not found: ID does not exist" Dec 02 10:54:19 crc kubenswrapper[4685]: I1202 10:54:19.911789 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" path="/var/lib/kubelet/pods/4edc8dd0-5012-4d21-b6de-61ddaf58faf0/volumes" Dec 02 10:54:21 crc kubenswrapper[4685]: I1202 10:54:21.899597 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:54:21 crc kubenswrapper[4685]: E1202 10:54:21.900152 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:54:34 crc kubenswrapper[4685]: I1202 10:54:34.901240 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" 
Dec 02 10:54:34 crc kubenswrapper[4685]: E1202 10:54:34.902076 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:54:40 crc kubenswrapper[4685]: I1202 10:54:40.169475 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-56bd4844cd-s5f2m_746491bf-6d00-4370-a7ba-740687bd6faa/barbican-api/0.log" Dec 02 10:54:40 crc kubenswrapper[4685]: I1202 10:54:40.301263 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-56bd4844cd-s5f2m_746491bf-6d00-4370-a7ba-740687bd6faa/barbican-api-log/0.log" Dec 02 10:54:40 crc kubenswrapper[4685]: I1202 10:54:40.411368 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7b57b745f4-h769r_48d319ac-7ed6-48dd-b934-91833d81dd44/barbican-keystone-listener/0.log" Dec 02 10:54:40 crc kubenswrapper[4685]: I1202 10:54:40.497920 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7b57b745f4-h769r_48d319ac-7ed6-48dd-b934-91833d81dd44/barbican-keystone-listener-log/0.log" Dec 02 10:54:40 crc kubenswrapper[4685]: I1202 10:54:40.529879 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6cd49d95f9-sdvx2_f2daca7c-e2f6-4e26-a1d8-259d2123c2d6/barbican-worker/0.log" Dec 02 10:54:40 crc kubenswrapper[4685]: I1202 10:54:40.640549 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6cd49d95f9-sdvx2_f2daca7c-e2f6-4e26-a1d8-259d2123c2d6/barbican-worker-log/0.log" Dec 02 10:54:41 crc kubenswrapper[4685]: I1202 10:54:41.053508 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm_d8a0aadd-4e71-45c1-a810-fae1955f590f/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:41 crc kubenswrapper[4685]: I1202 10:54:41.069675 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2b4cba34-5e49-48a8-a496-fe5998d56f09/ceilometer-central-agent/0.log" Dec 02 10:54:41 crc kubenswrapper[4685]: I1202 10:54:41.223972 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2b4cba34-5e49-48a8-a496-fe5998d56f09/ceilometer-notification-agent/0.log" Dec 02 10:54:41 crc kubenswrapper[4685]: I1202 10:54:41.281745 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2b4cba34-5e49-48a8-a496-fe5998d56f09/proxy-httpd/0.log" Dec 02 10:54:41 crc kubenswrapper[4685]: I1202 10:54:41.309748 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2b4cba34-5e49-48a8-a496-fe5998d56f09/sg-core/0.log" Dec 02 10:54:41 crc kubenswrapper[4685]: I1202 10:54:41.484876 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_62f488e9-6105-488d-bb01-f612db6e1fae/cinder-api/0.log" Dec 02 10:54:41 crc kubenswrapper[4685]: I1202 10:54:41.504574 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_62f488e9-6105-488d-bb01-f612db6e1fae/cinder-api-log/0.log" Dec 02 10:54:41 crc kubenswrapper[4685]: I1202 10:54:41.670616 4685 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_880ba69c-db0d-4fab-b46d-45c7e8915684/cinder-scheduler/0.log" Dec 02 10:54:41 crc kubenswrapper[4685]: I1202 10:54:41.743992 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_880ba69c-db0d-4fab-b46d-45c7e8915684/probe/0.log" Dec 02 10:54:41 crc kubenswrapper[4685]: I1202 10:54:41.818125 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-899q5_fb248306-2b41-458e-9127-987af525ae12/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:42 crc kubenswrapper[4685]: I1202 10:54:42.037435 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6_459c09ff-049f-4edf-b41c-dc203f6527dc/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:42 crc kubenswrapper[4685]: I1202 10:54:42.055261 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-gpxbh_7e14e196-7cc2-49ba-8fd9-fdafa0c0727d/init/0.log" Dec 02 10:54:42 crc kubenswrapper[4685]: I1202 10:54:42.234810 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-gpxbh_7e14e196-7cc2-49ba-8fd9-fdafa0c0727d/init/0.log" Dec 02 10:54:42 crc kubenswrapper[4685]: I1202 10:54:42.317589 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-bpd44_1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:42 crc kubenswrapper[4685]: I1202 10:54:42.353403 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-gpxbh_7e14e196-7cc2-49ba-8fd9-fdafa0c0727d/dnsmasq-dns/0.log" Dec 02 10:54:42 crc kubenswrapper[4685]: I1202 10:54:42.534684 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ed1ab1f1-1e32-439f-91cf-ba12aca0273a/glance-httpd/0.log" Dec 02 10:54:42 crc kubenswrapper[4685]: I1202 10:54:42.556684 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ed1ab1f1-1e32-439f-91cf-ba12aca0273a/glance-log/0.log" Dec 02 10:54:42 crc kubenswrapper[4685]: I1202 10:54:42.721450 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_37ce4a0b-7871-4784-a19e-36bd7b62ebbe/glance-httpd/0.log" Dec 02 10:54:42 crc kubenswrapper[4685]: I1202 10:54:42.731258 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_37ce4a0b-7871-4784-a19e-36bd7b62ebbe/glance-log/0.log" Dec 02 10:54:42 crc kubenswrapper[4685]: I1202 10:54:42.917855 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-79bf856964-lh2w8_3d251819-b2e8-4cc5-b56c-977ea549bf2f/horizon/0.log" Dec 02 10:54:43 crc kubenswrapper[4685]: I1202 10:54:43.094206 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw_b8bea067-fabf-4a0b-b873-05104a785c39/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:43 crc kubenswrapper[4685]: I1202 10:54:43.159188 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-79bf856964-lh2w8_3d251819-b2e8-4cc5-b56c-977ea549bf2f/horizon-log/0.log" Dec 02 10:54:43 crc kubenswrapper[4685]: I1202 10:54:43.296382 4685 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-t7jvq_e08815ae-1633-46ac-85ce-3aa867348763/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:43 crc kubenswrapper[4685]: I1202 10:54:43.425650 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6549967485-7j47k_8f4445dd-b293-4beb-af28-3d6dcf902a94/keystone-api/0.log" Dec 02 10:54:43 crc kubenswrapper[4685]: I1202 10:54:43.532642 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c/kube-state-metrics/0.log" Dec 02 10:54:43 crc kubenswrapper[4685]: I1202 10:54:43.730309 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm_5a0df3ff-af5b-4aa9-b108-cc65a1f43571/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:44 crc kubenswrapper[4685]: I1202 10:54:44.059229 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6dc8d84c85-x99fw_c4743ed7-a0f7-46b1-b0d7-50828835440e/neutron-api/0.log" Dec 02 10:54:44 crc kubenswrapper[4685]: I1202 10:54:44.126501 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6dc8d84c85-x99fw_c4743ed7-a0f7-46b1-b0d7-50828835440e/neutron-httpd/0.log" Dec 02 10:54:44 crc kubenswrapper[4685]: I1202 10:54:44.352304 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52_9fc6409b-b597-4290-9d9d-313fa733ddf7/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:44 crc kubenswrapper[4685]: I1202 10:54:44.850323 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2/nova-api-api/0.log" Dec 02 10:54:44 crc kubenswrapper[4685]: I1202 10:54:44.851830 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2/nova-api-log/0.log" Dec 02 10:54:44 crc kubenswrapper[4685]: I1202 10:54:44.880746 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_587a88c2-acbc-4104-959f-8dbc52f511de/nova-cell0-conductor-conductor/0.log" Dec 02 10:54:45 crc kubenswrapper[4685]: I1202 10:54:45.210812 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_009e6abc-834a-4487-b70d-0ec6e64d994d/nova-cell1-novncproxy-novncproxy/0.log" Dec 02 10:54:45 crc kubenswrapper[4685]: I1202 10:54:45.233290 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_6d403a63-e543-4bc9-8f38-daaee1ceb4e6/nova-cell1-conductor-conductor/0.log" Dec 02 10:54:45 crc kubenswrapper[4685]: I1202 10:54:45.805916 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-t8bqh_ab586931-b7c5-450d-831c-ca05dbb865ad/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:45 crc kubenswrapper[4685]: I1202 10:54:45.852413 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_2f14ae08-f9e6-41bc-bb0c-9e6450267d63/nova-metadata-log/0.log" Dec 02 10:54:46 crc kubenswrapper[4685]: I1202 10:54:46.142282 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_b8b721a8-0f97-4892-8c89-56382988595e/nova-scheduler-scheduler/0.log" Dec 02 10:54:46 crc kubenswrapper[4685]: I1202 10:54:46.254492 4685 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_67ed831e-7122-4e6f-a320-51df1849c1d7/mysql-bootstrap/0.log" Dec 02 10:54:46 crc kubenswrapper[4685]: I1202 10:54:46.425471 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_67ed831e-7122-4e6f-a320-51df1849c1d7/galera/0.log" Dec 02 10:54:46 crc kubenswrapper[4685]: I1202 10:54:46.505959 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_67ed831e-7122-4e6f-a320-51df1849c1d7/mysql-bootstrap/0.log" Dec 02 10:54:46 crc kubenswrapper[4685]: I1202 10:54:46.726173 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_2f14ae08-f9e6-41bc-bb0c-9e6450267d63/nova-metadata-metadata/0.log" Dec 02 10:54:46 crc kubenswrapper[4685]: I1202 10:54:46.766223 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_13cc5021-b162-434b-8ae9-d3781b6f421e/mysql-bootstrap/0.log" Dec 02 10:54:47 crc kubenswrapper[4685]: I1202 10:54:47.021915 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_13cc5021-b162-434b-8ae9-d3781b6f421e/mysql-bootstrap/0.log" Dec 02 10:54:47 crc kubenswrapper[4685]: I1202 10:54:47.069992 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_13cc5021-b162-434b-8ae9-d3781b6f421e/galera/0.log" Dec 02 10:54:47 crc kubenswrapper[4685]: I1202 10:54:47.110376 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_62bb7fcc-d4cc-4b1d-b29a-98d6f3441731/openstackclient/0.log" Dec 02 10:54:47 crc kubenswrapper[4685]: I1202 10:54:47.441136 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-2sntp_ee7d3162-98e7-4af9-aad0-2098e23d1743/ovn-controller/0.log" Dec 02 10:54:47 crc kubenswrapper[4685]: I1202 10:54:47.447982 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-98qz8_12ca7abe-455f-4bfc-9da9-420462c92e69/openstack-network-exporter/0.log" Dec 02 10:54:47 crc kubenswrapper[4685]: I1202 10:54:47.669103 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c8mrc_eac3c6e2-9ac7-40a0-91e5-a54010d07a99/ovsdb-server-init/0.log" Dec 02 10:54:47 crc kubenswrapper[4685]: I1202 10:54:47.858066 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c8mrc_eac3c6e2-9ac7-40a0-91e5-a54010d07a99/ovsdb-server-init/0.log" Dec 02 10:54:47 crc kubenswrapper[4685]: I1202 10:54:47.975898 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c8mrc_eac3c6e2-9ac7-40a0-91e5-a54010d07a99/ovs-vswitchd/0.log" Dec 02 10:54:48 crc kubenswrapper[4685]: I1202 10:54:48.034141 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c8mrc_eac3c6e2-9ac7-40a0-91e5-a54010d07a99/ovsdb-server/0.log" Dec 02 10:54:48 crc kubenswrapper[4685]: I1202 10:54:48.257141 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-xd6j6_56e7d4c2-fb54-4104-ad31-0d90ceafd0ff/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:48 crc kubenswrapper[4685]: I1202 10:54:48.388830 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a8e44cc8-073c-4db3-af8b-c6b18bb2c808/openstack-network-exporter/0.log" Dec 02 10:54:48 crc kubenswrapper[4685]: I1202 10:54:48.426951 4685 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a8e44cc8-073c-4db3-af8b-c6b18bb2c808/ovn-northd/0.log" Dec 02 10:54:48 crc kubenswrapper[4685]: I1202 10:54:48.493488 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0875b84e-91b6-4092-b8b3-a75abd86728d/openstack-network-exporter/0.log" Dec 02 10:54:48 crc kubenswrapper[4685]: I1202 10:54:48.671828 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0875b84e-91b6-4092-b8b3-a75abd86728d/ovsdbserver-nb/0.log" Dec 02 10:54:48 crc kubenswrapper[4685]: I1202 10:54:48.785486 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_03278bdb-7697-4dd0-b482-97b93aa055ba/openstack-network-exporter/0.log" Dec 02 10:54:48 crc kubenswrapper[4685]: I1202 10:54:48.881883 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_03278bdb-7697-4dd0-b482-97b93aa055ba/ovsdbserver-sb/0.log" Dec 02 10:54:48 crc kubenswrapper[4685]: I1202 10:54:48.899664 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:54:48 crc kubenswrapper[4685]: E1202 10:54:48.899857 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:54:49 crc kubenswrapper[4685]: I1202 10:54:49.137028 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-9f589dd8-wf2cx_1af68409-9c33-470d-96bd-abf8eb121c9d/placement-log/0.log" Dec 02 10:54:49 crc kubenswrapper[4685]: I1202 10:54:49.147661 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-9f589dd8-wf2cx_1af68409-9c33-470d-96bd-abf8eb121c9d/placement-api/0.log" Dec 02 10:54:49 crc kubenswrapper[4685]: I1202 10:54:49.606169 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_79bd4baf-4a03-43d7-8b3b-3a632474694e/setup-container/0.log" Dec 02 10:54:49 crc kubenswrapper[4685]: I1202 10:54:49.894776 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_79bd4baf-4a03-43d7-8b3b-3a632474694e/setup-container/0.log" Dec 02 10:54:49 crc kubenswrapper[4685]: I1202 10:54:49.986102 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_79bd4baf-4a03-43d7-8b3b-3a632474694e/rabbitmq/0.log" Dec 02 10:54:50 crc kubenswrapper[4685]: I1202 10:54:50.015920 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_622e7d58-a7d5-4898-a94b-ac66e3d0ee7f/setup-container/0.log" Dec 02 10:54:50 crc kubenswrapper[4685]: I1202 10:54:50.231654 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_622e7d58-a7d5-4898-a94b-ac66e3d0ee7f/setup-container/0.log" Dec 02 10:54:50 crc kubenswrapper[4685]: I1202 10:54:50.314677 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_622e7d58-a7d5-4898-a94b-ac66e3d0ee7f/rabbitmq/0.log" Dec 02 10:54:50 crc kubenswrapper[4685]: I1202 10:54:50.414476 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4_a639c48a-4ce5-45e3-ae7a-22672b81443f/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:50 crc kubenswrapper[4685]: I1202 10:54:50.530658 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-8kv9r_d80fcbde-80c5-4c5c-a42d-f95348702600/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:50 crc kubenswrapper[4685]: I1202 10:54:50.721464 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp_4c22e2d6-7984-4d74-9202-ea57de627392/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:50 crc kubenswrapper[4685]: I1202 10:54:50.775731 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-lc6xp_003cc841-bf83-4ac8-8b56-f6d451bff580/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:51 crc kubenswrapper[4685]: I1202 10:54:51.066930 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-2md5s_d47b11b2-65de-45ac-9395-81ccca27d279/ssh-known-hosts-edpm-deployment/0.log" Dec 02 10:54:51 crc kubenswrapper[4685]: I1202 10:54:51.315850 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-586f964d77-g52q4_4ad077a4-4937-4de8-9da4-ca0f58f3adef/proxy-httpd/0.log" Dec 02 10:54:51 crc kubenswrapper[4685]: I1202 10:54:51.367023 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-586f964d77-g52q4_4ad077a4-4937-4de8-9da4-ca0f58f3adef/proxy-server/0.log" Dec 02 10:54:51 crc kubenswrapper[4685]: I1202 10:54:51.402292 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-nbrzh_0b83b134-73c4-447e-99a2-a49c814e589c/swift-ring-rebalance/0.log" Dec 02 10:54:51 crc kubenswrapper[4685]: I1202 10:54:51.633937 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/account-auditor/0.log" Dec 02 10:54:51 crc kubenswrapper[4685]: I1202 10:54:51.754430 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/account-reaper/0.log" Dec 02 10:54:51 crc kubenswrapper[4685]: I1202 10:54:51.847077 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/account-replicator/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.035096 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/container-auditor/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.052207 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/account-server/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.142697 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/container-replicator/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.182986 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/container-server/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.264340 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/container-updater/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.296291 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/object-auditor/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.380366 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/object-expirer/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.465503 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/object-replicator/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.531201 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/object-updater/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.551541 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/object-server/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.632044 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/rsync/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.749998 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/swift-recon-cron/0.log" Dec 02 10:54:52 crc kubenswrapper[4685]: I1202 10:54:52.863546 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr_cefd0695-3aac-4aa9-b035-f6e6d22d79f7/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:54:53 crc kubenswrapper[4685]: I1202 10:54:53.017980 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_27023d6a-a566-4be7-9244-915ce922dd05/tempest-tests-tempest-tests-runner/0.log" Dec 02 10:54:53 crc kubenswrapper[4685]: I1202 10:54:53.160705 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_0063a6a8-b535-4747-b1bd-8680aea722af/test-operator-logs-container/0.log" Dec 02 10:54:53 crc kubenswrapper[4685]: I1202 10:54:53.619653 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn_13eea0ea-5642-4780-9aaa-dd0148f05809/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 10:55:01 crc kubenswrapper[4685]: I1202 10:55:01.899363 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:55:01 crc kubenswrapper[4685]: E1202 10:55:01.900096 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:55:06 crc kubenswrapper[4685]: I1202 10:55:06.527517 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_449de76a-491d-4874-8b46-df24eb5c628a/memcached/0.log" Dec 02 10:55:12 crc 
kubenswrapper[4685]: I1202 10:55:12.899821 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:55:12 crc kubenswrapper[4685]: E1202 10:55:12.901755 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.238464 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vgbxs"] Dec 02 10:55:17 crc kubenswrapper[4685]: E1202 10:55:17.246010 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerName="registry-server" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.246113 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerName="registry-server" Dec 02 10:55:17 crc kubenswrapper[4685]: E1202 10:55:17.246198 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b78f7e1-a624-439f-af6a-0f28aebb23b3" containerName="container-00" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.246254 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b78f7e1-a624-439f-af6a-0f28aebb23b3" containerName="container-00" Dec 02 10:55:17 crc kubenswrapper[4685]: E1202 10:55:17.246326 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerName="extract-utilities" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.246387 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerName="extract-utilities" Dec 02 10:55:17 crc kubenswrapper[4685]: E1202 10:55:17.246473 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerName="extract-content" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.246538 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerName="extract-content" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.246978 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="4edc8dd0-5012-4d21-b6de-61ddaf58faf0" containerName="registry-server" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.247070 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b78f7e1-a624-439f-af6a-0f28aebb23b3" containerName="container-00" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.250332 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.263188 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vgbxs"] Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.356478 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb8fq\" (UniqueName: \"kubernetes.io/projected/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-kube-api-access-jb8fq\") pod \"certified-operators-vgbxs\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.356817 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-utilities\") pod \"certified-operators-vgbxs\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.356988 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-catalog-content\") pod \"certified-operators-vgbxs\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.458515 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-catalog-content\") pod \"certified-operators-vgbxs\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.458827 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb8fq\" (UniqueName: \"kubernetes.io/projected/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-kube-api-access-jb8fq\") pod \"certified-operators-vgbxs\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.459011 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-catalog-content\") pod \"certified-operators-vgbxs\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.459841 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-utilities\") pod \"certified-operators-vgbxs\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.460462 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-utilities\") pod \"certified-operators-vgbxs\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.483086 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jb8fq\" (UniqueName: \"kubernetes.io/projected/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-kube-api-access-jb8fq\") pod \"certified-operators-vgbxs\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:17 crc kubenswrapper[4685]: I1202 10:55:17.577819 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:18 crc kubenswrapper[4685]: I1202 10:55:18.157272 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vgbxs"] Dec 02 10:55:18 crc kubenswrapper[4685]: I1202 10:55:18.527124 4685 generic.go:334] "Generic (PLEG): container finished" podID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerID="43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b" exitCode=0 Dec 02 10:55:18 crc kubenswrapper[4685]: I1202 10:55:18.527230 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgbxs" event={"ID":"b253d13b-6c7e-4b7f-a032-e27232ae9b1a","Type":"ContainerDied","Data":"43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b"} Dec 02 10:55:18 crc kubenswrapper[4685]: I1202 10:55:18.527538 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgbxs" event={"ID":"b253d13b-6c7e-4b7f-a032-e27232ae9b1a","Type":"ContainerStarted","Data":"a8ffaa280f67154a2321fcaa29c2a391e63b35213ba443cb825b7606ca3b4bcb"} Dec 02 10:55:19 crc kubenswrapper[4685]: I1202 10:55:19.538372 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgbxs" event={"ID":"b253d13b-6c7e-4b7f-a032-e27232ae9b1a","Type":"ContainerStarted","Data":"57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec"} Dec 02 10:55:20 crc kubenswrapper[4685]: I1202 10:55:20.550329 4685 generic.go:334] "Generic (PLEG): container finished" podID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerID="57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec" exitCode=0 Dec 02 10:55:20 crc kubenswrapper[4685]: I1202 10:55:20.550411 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgbxs" event={"ID":"b253d13b-6c7e-4b7f-a032-e27232ae9b1a","Type":"ContainerDied","Data":"57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec"} Dec 02 10:55:21 crc kubenswrapper[4685]: I1202 10:55:21.577670 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgbxs" event={"ID":"b253d13b-6c7e-4b7f-a032-e27232ae9b1a","Type":"ContainerStarted","Data":"5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5"} Dec 02 10:55:21 crc kubenswrapper[4685]: I1202 10:55:21.610167 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vgbxs" podStartSLOduration=2.075869613 podStartE2EDuration="4.610140846s" podCreationTimestamp="2025-12-02 10:55:17 +0000 UTC" firstStartedPulling="2025-12-02 10:55:18.529641239 +0000 UTC m=+3210.901415393" lastFinishedPulling="2025-12-02 10:55:21.063912472 +0000 UTC m=+3213.435686626" observedRunningTime="2025-12-02 10:55:21.598907163 +0000 UTC m=+3213.970681337" watchObservedRunningTime="2025-12-02 10:55:21.610140846 +0000 UTC m=+3213.981915000" Dec 02 10:55:25 crc kubenswrapper[4685]: I1202 10:55:25.014085 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/util/0.log" Dec 02 10:55:25 crc kubenswrapper[4685]: I1202 10:55:25.213462 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/pull/0.log" Dec 02 10:55:25 crc kubenswrapper[4685]: I1202 10:55:25.273807 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/util/0.log" Dec 02 10:55:25 crc kubenswrapper[4685]: I1202 10:55:25.304443 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/pull/0.log" Dec 02 10:55:25 crc kubenswrapper[4685]: I1202 10:55:25.512707 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/util/0.log" Dec 02 10:55:25 crc kubenswrapper[4685]: I1202 10:55:25.536076 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/extract/0.log" Dec 02 10:55:25 crc kubenswrapper[4685]: I1202 10:55:25.536688 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/pull/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 10:55:26.082412 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-7qgfl_544b6fe9-890e-4b17-8f8e-55f53d64fcf7/kube-rbac-proxy/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 10:55:26.125181 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-7qgfl_544b6fe9-890e-4b17-8f8e-55f53d64fcf7/manager/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 10:55:26.135614 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-855qm_cccf3baf-f063-4962-8856-c80c78439b82/kube-rbac-proxy/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 10:55:26.287431 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-855qm_cccf3baf-f063-4962-8856-c80c78439b82/manager/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 10:55:26.382507 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-9s2nf_dd4f4fd1-38b6-4732-bab1-96a522d34e53/kube-rbac-proxy/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 10:55:26.430725 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-9s2nf_dd4f4fd1-38b6-4732-bab1-96a522d34e53/manager/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 10:55:26.661359 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-vn545_901441b3-91d9-4edf-8955-cd5514589dec/manager/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 
10:55:26.701573 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-vn545_901441b3-91d9-4edf-8955-cd5514589dec/kube-rbac-proxy/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 10:55:26.769239 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xqspp_b16756f6-29e0-4e33-8e00-f7b0e193b958/kube-rbac-proxy/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 10:55:26.871038 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xqspp_b16756f6-29e0-4e33-8e00-f7b0e193b958/manager/0.log" Dec 02 10:55:26 crc kubenswrapper[4685]: I1202 10:55:26.913376 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-xnk7k_739ae795-6209-4881-8bf6-be391a810a86/kube-rbac-proxy/0.log" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.028507 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-xnk7k_739ae795-6209-4881-8bf6-be391a810a86/manager/0.log" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.136658 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-p62sl_36189be8-91c8-4b60-90d6-050a07ae86d3/kube-rbac-proxy/0.log" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.338099 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-p62sl_36189be8-91c8-4b60-90d6-050a07ae86d3/manager/0.log" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.387854 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-vz7bq_bb0c5cd2-9459-4e31-8613-f758d330dce2/manager/0.log" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.420867 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-vz7bq_bb0c5cd2-9459-4e31-8613-f758d330dce2/kube-rbac-proxy/0.log" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.578836 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.578882 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.580703 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-5sn8t_6513f83d-2079-477d-8976-68cb969806fe/kube-rbac-proxy/0.log" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.623990 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.672919 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.780453 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-5sn8t_6513f83d-2079-477d-8976-68cb969806fe/manager/0.log" Dec 02 10:55:27 crc 
kubenswrapper[4685]: I1202 10:55:27.837817 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-rngtk_59c1b39b-2153-4c88-9229-0e951b086fdd/kube-rbac-proxy/0.log" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.870427 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vgbxs"] Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.905977 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:55:27 crc kubenswrapper[4685]: E1202 10:55:27.906537 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 10:55:27 crc kubenswrapper[4685]: I1202 10:55:27.955212 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-rngtk_59c1b39b-2153-4c88-9229-0e951b086fdd/manager/0.log" Dec 02 10:55:28 crc kubenswrapper[4685]: I1202 10:55:28.006116 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-9l2jq_bc4b6647-31ea-45d0-ac59-b8b1cef80aeb/kube-rbac-proxy/0.log" Dec 02 10:55:28 crc kubenswrapper[4685]: I1202 10:55:28.062687 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-9l2jq_bc4b6647-31ea-45d0-ac59-b8b1cef80aeb/manager/0.log" Dec 02 10:55:28 crc kubenswrapper[4685]: I1202 10:55:28.212902 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-7tmr8_0a4bb15b-4ada-4698-9747-dfa600f319d3/kube-rbac-proxy/0.log" Dec 02 10:55:28 crc kubenswrapper[4685]: I1202 10:55:28.272504 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-7tmr8_0a4bb15b-4ada-4698-9747-dfa600f319d3/manager/0.log" Dec 02 10:55:28 crc kubenswrapper[4685]: I1202 10:55:28.524225 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-bqctl_e88f923e-f7cc-4292-a49c-483be1e7848e/kube-rbac-proxy/0.log" Dec 02 10:55:28 crc kubenswrapper[4685]: I1202 10:55:28.526724 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-bqctl_e88f923e-f7cc-4292-a49c-483be1e7848e/manager/0.log" Dec 02 10:55:28 crc kubenswrapper[4685]: I1202 10:55:28.666757 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-vx7jq_20b551a4-85bc-4ecb-b502-08f844a6b911/kube-rbac-proxy/0.log" Dec 02 10:55:28 crc kubenswrapper[4685]: I1202 10:55:28.735894 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-vx7jq_20b551a4-85bc-4ecb-b502-08f844a6b911/manager/0.log" Dec 02 10:55:28 crc kubenswrapper[4685]: I1202 10:55:28.826484 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd49v57k_50904294-59b3-4a71-84f9-8f171cad02e2/kube-rbac-proxy/0.log" Dec 02 10:55:28 crc kubenswrapper[4685]: I1202 10:55:28.901386 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd49v57k_50904294-59b3-4a71-84f9-8f171cad02e2/manager/0.log" Dec 02 10:55:29 crc kubenswrapper[4685]: I1202 10:55:29.650266 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vgbxs" podUID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerName="registry-server" containerID="cri-o://5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5" gracePeriod=2 Dec 02 10:55:29 crc kubenswrapper[4685]: I1202 10:55:29.666232 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-8mbzs_16f31204-e29f-41f1-b560-814671c5967a/registry-server/0.log" Dec 02 10:55:29 crc kubenswrapper[4685]: I1202 10:55:29.669479 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-b866867cf-zf2zb_6e78ca06-e6a2-4973-a8df-593409527f5e/operator/0.log" Dec 02 10:55:29 crc kubenswrapper[4685]: I1202 10:55:29.911653 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-njgr6_30be1eaf-4d63-4fb6-9372-0857432b6b73/kube-rbac-proxy/0.log" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.131440 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-njgr6_30be1eaf-4d63-4fb6-9372-0857432b6b73/manager/0.log" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.157422 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.223759 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-j5hhn_1177803f-ea41-40ed-8b1d-58c6761363f0/kube-rbac-proxy/0.log" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.262509 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-j5hhn_1177803f-ea41-40ed-8b1d-58c6761363f0/manager/0.log" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.340959 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-utilities\") pod \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.341147 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jb8fq\" (UniqueName: \"kubernetes.io/projected/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-kube-api-access-jb8fq\") pod \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.341209 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-catalog-content\") pod \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\" (UID: \"b253d13b-6c7e-4b7f-a032-e27232ae9b1a\") " Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.341947 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-utilities" (OuterVolumeSpecName: "utilities") pod "b253d13b-6c7e-4b7f-a032-e27232ae9b1a" (UID: "b253d13b-6c7e-4b7f-a032-e27232ae9b1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.360835 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-kube-api-access-jb8fq" (OuterVolumeSpecName: "kube-api-access-jb8fq") pod "b253d13b-6c7e-4b7f-a032-e27232ae9b1a" (UID: "b253d13b-6c7e-4b7f-a032-e27232ae9b1a"). InnerVolumeSpecName "kube-api-access-jb8fq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.384118 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b253d13b-6c7e-4b7f-a032-e27232ae9b1a" (UID: "b253d13b-6c7e-4b7f-a032-e27232ae9b1a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.443130 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.443161 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jb8fq\" (UniqueName: \"kubernetes.io/projected/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-kube-api-access-jb8fq\") on node \"crc\" DevicePath \"\"" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.443173 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b253d13b-6c7e-4b7f-a032-e27232ae9b1a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.515100 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-5pwml_226039ae-3d4c-41e0-8a24-985eb9f63d27/operator/0.log" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.581296 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-695b4bc5dc-jxvqs_e55320ae-8458-4802-aa07-e406f1b58fce/kube-rbac-proxy/0.log" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.615376 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-59c586c68c-cvbtm_aa95a5ac-c9b5-4850-8201-b696ed655570/manager/0.log" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.660039 4685 generic.go:334] "Generic (PLEG): container finished" podID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerID="5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5" exitCode=0 Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.660082 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgbxs" event={"ID":"b253d13b-6c7e-4b7f-a032-e27232ae9b1a","Type":"ContainerDied","Data":"5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5"} Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.660101 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vgbxs" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.660122 4685 scope.go:117] "RemoveContainer" containerID="5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.660109 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgbxs" event={"ID":"b253d13b-6c7e-4b7f-a032-e27232ae9b1a","Type":"ContainerDied","Data":"a8ffaa280f67154a2321fcaa29c2a391e63b35213ba443cb825b7606ca3b4bcb"} Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.677829 4685 scope.go:117] "RemoveContainer" containerID="57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.707076 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vgbxs"] Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.715187 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vgbxs"] Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.715320 4685 scope.go:117] "RemoveContainer" containerID="43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.732258 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-695b4bc5dc-jxvqs_e55320ae-8458-4802-aa07-e406f1b58fce/manager/0.log" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.749460 4685 scope.go:117] "RemoveContainer" containerID="5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5" Dec 02 10:55:30 crc kubenswrapper[4685]: E1202 10:55:30.750017 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5\": container with ID starting with 5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5 not found: ID does not exist" containerID="5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.750042 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5"} err="failed to get container status \"5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5\": rpc error: code = NotFound desc = could not find container \"5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5\": container with ID starting with 5cf45611400091051ab21987f00212d8802800693e2b459d22a1ea69c233a3d5 not found: ID does not exist" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.750064 4685 scope.go:117] "RemoveContainer" containerID="57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec" Dec 02 10:55:30 crc kubenswrapper[4685]: E1202 10:55:30.750368 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec\": container with ID starting with 57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec not found: ID does not exist" containerID="57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.750404 4685 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec"} err="failed to get container status \"57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec\": rpc error: code = NotFound desc = could not find container \"57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec\": container with ID starting with 57d5b1755d3849bfe5fe6b4b9164ca7c8232c9eb88f40e9955270757beaa4fec not found: ID does not exist" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.750428 4685 scope.go:117] "RemoveContainer" containerID="43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b" Dec 02 10:55:30 crc kubenswrapper[4685]: E1202 10:55:30.752611 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b\": container with ID starting with 43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b not found: ID does not exist" containerID="43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.752645 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b"} err="failed to get container status \"43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b\": rpc error: code = NotFound desc = could not find container \"43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b\": container with ID starting with 43f8859ea617465c157edce42d508c5ecb8b9e0ca3eda83e4840f84ae097846b not found: ID does not exist" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.806304 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-llc6z_cf795d28-4ac2-44de-9ca7-10ef8788eb80/kube-rbac-proxy/0.log" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.843715 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-llc6z_cf795d28-4ac2-44de-9ca7-10ef8788eb80/manager/0.log" Dec 02 10:55:30 crc kubenswrapper[4685]: I1202 10:55:30.955511 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-zsxv4_8c138433-1126-4ffa-a017-19740e566084/kube-rbac-proxy/0.log" Dec 02 10:55:31 crc kubenswrapper[4685]: I1202 10:55:31.015342 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-zsxv4_8c138433-1126-4ffa-a017-19740e566084/manager/0.log" Dec 02 10:55:31 crc kubenswrapper[4685]: I1202 10:55:31.114267 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-2dpvv_01015eb1-ac2f-4bc1-81d3-145ce402db5e/kube-rbac-proxy/0.log" Dec 02 10:55:31 crc kubenswrapper[4685]: I1202 10:55:31.199470 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-2dpvv_01015eb1-ac2f-4bc1-81d3-145ce402db5e/manager/0.log" Dec 02 10:55:31 crc kubenswrapper[4685]: I1202 10:55:31.908728 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" path="/var/lib/kubelet/pods/b253d13b-6c7e-4b7f-a032-e27232ae9b1a/volumes" Dec 02 10:55:42 crc kubenswrapper[4685]: I1202 10:55:42.900294 4685 
scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 10:55:43 crc kubenswrapper[4685]: I1202 10:55:43.761241 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"20cc17afc70d7c15755ebf3185cf6db25f125900540259ba4fbfe1ecb29d6d64"} Dec 02 10:55:50 crc kubenswrapper[4685]: I1202 10:55:50.769190 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-rpwnx_70ecf800-4f19-4a60-834f-d72f0a1d2d37/control-plane-machine-set-operator/0.log" Dec 02 10:55:50 crc kubenswrapper[4685]: I1202 10:55:50.950967 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-5glrx_2dee0464-6eab-44a6-a33d-3b6096319ecf/kube-rbac-proxy/0.log" Dec 02 10:55:50 crc kubenswrapper[4685]: I1202 10:55:50.996464 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-5glrx_2dee0464-6eab-44a6-a33d-3b6096319ecf/machine-api-operator/0.log" Dec 02 10:56:04 crc kubenswrapper[4685]: I1202 10:56:04.233449 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-lt7gc_f2c74be0-fa96-4153-8159-bd67e214d601/cert-manager-controller/0.log" Dec 02 10:56:04 crc kubenswrapper[4685]: I1202 10:56:04.300233 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-2dnjg_9e280929-1daa-4ff4-b690-1192d259178d/cert-manager-cainjector/0.log" Dec 02 10:56:04 crc kubenswrapper[4685]: I1202 10:56:04.456866 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-6gfx7_379893b2-eb01-49f9-b70c-9e459c6c6ed0/cert-manager-webhook/0.log" Dec 02 10:56:18 crc kubenswrapper[4685]: I1202 10:56:18.143647 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-dtq9q_c8a900b2-3134-445e-8349-4ffab8e264bb/nmstate-console-plugin/0.log" Dec 02 10:56:18 crc kubenswrapper[4685]: I1202 10:56:18.430587 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-wbwxh_dfaa292c-cbfc-495c-8b18-768606608d14/nmstate-handler/0.log" Dec 02 10:56:18 crc kubenswrapper[4685]: I1202 10:56:18.493984 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-zshhw_b7d57eeb-24b9-461f-ab36-86ffcf831603/nmstate-metrics/0.log" Dec 02 10:56:18 crc kubenswrapper[4685]: I1202 10:56:18.507630 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-zshhw_b7d57eeb-24b9-461f-ab36-86ffcf831603/kube-rbac-proxy/0.log" Dec 02 10:56:18 crc kubenswrapper[4685]: I1202 10:56:18.669098 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-wklzz_4b250238-0336-4009-b01e-2a469bf59e33/nmstate-operator/0.log" Dec 02 10:56:18 crc kubenswrapper[4685]: I1202 10:56:18.692176 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-2khp8_8d950d2d-44b3-4441-9434-11fc6a39dca4/nmstate-webhook/0.log" Dec 02 10:56:33 crc kubenswrapper[4685]: I1202 10:56:33.581995 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_controller-f8648f98b-z8v2s_acd555b8-75ea-48d0-a942-c8762e15f37c/kube-rbac-proxy/0.log" Dec 02 10:56:33 crc kubenswrapper[4685]: I1202 10:56:33.649489 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-z8v2s_acd555b8-75ea-48d0-a942-c8762e15f37c/controller/0.log" Dec 02 10:56:33 crc kubenswrapper[4685]: I1202 10:56:33.776615 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-frr-files/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.053013 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-frr-files/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.068239 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-metrics/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.080258 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-reloader/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.118510 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-reloader/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.315615 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-metrics/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.315789 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-frr-files/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.335101 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-reloader/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.393118 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-metrics/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.553192 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-frr-files/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.571450 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-metrics/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.585144 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-reloader/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.619190 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/controller/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.859097 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/frr-metrics/0.log" Dec 02 10:56:34 crc kubenswrapper[4685]: I1202 10:56:34.923467 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/kube-rbac-proxy/0.log" Dec 02 10:56:34 crc 
kubenswrapper[4685]: I1202 10:56:34.961315 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/kube-rbac-proxy-frr/0.log" Dec 02 10:56:35 crc kubenswrapper[4685]: I1202 10:56:35.223692 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-gdnpk_aa6b58de-e46c-4019-9a27-2991ad81429f/frr-k8s-webhook-server/0.log" Dec 02 10:56:35 crc kubenswrapper[4685]: I1202 10:56:35.228068 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/reloader/0.log" Dec 02 10:56:35 crc kubenswrapper[4685]: I1202 10:56:35.588241 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-685cd4986b-57sm9_9edd243d-18c6-4345-b31d-a9825a3ad745/manager/0.log" Dec 02 10:56:35 crc kubenswrapper[4685]: I1202 10:56:35.833471 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/frr/0.log" Dec 02 10:56:35 crc kubenswrapper[4685]: I1202 10:56:35.850253 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-2mprm_e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7/kube-rbac-proxy/0.log" Dec 02 10:56:35 crc kubenswrapper[4685]: I1202 10:56:35.865759 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6db9d48979-ljd69_7883a47e-8804-4094-8e91-73f854896283/webhook-server/0.log" Dec 02 10:56:36 crc kubenswrapper[4685]: I1202 10:56:36.312456 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-2mprm_e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7/speaker/0.log" Dec 02 10:56:49 crc kubenswrapper[4685]: I1202 10:56:49.256965 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/util/0.log" Dec 02 10:56:49 crc kubenswrapper[4685]: I1202 10:56:49.481600 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/util/0.log" Dec 02 10:56:49 crc kubenswrapper[4685]: I1202 10:56:49.487578 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/pull/0.log" Dec 02 10:56:49 crc kubenswrapper[4685]: I1202 10:56:49.489755 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/pull/0.log" Dec 02 10:56:49 crc kubenswrapper[4685]: I1202 10:56:49.668135 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/util/0.log" Dec 02 10:56:49 crc kubenswrapper[4685]: I1202 10:56:49.723658 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/extract/0.log" Dec 02 10:56:49 crc kubenswrapper[4685]: I1202 10:56:49.757963 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/pull/0.log" Dec 02 10:56:49 crc kubenswrapper[4685]: I1202 10:56:49.893277 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/util/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.064386 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/util/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.095653 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/pull/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.101139 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/pull/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.282192 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/pull/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.299263 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/extract/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.305288 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/util/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.481599 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-utilities/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.638837 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-utilities/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.662359 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-content/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.700187 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-content/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.834267 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-utilities/0.log" Dec 02 10:56:50 crc kubenswrapper[4685]: I1202 10:56:50.864298 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-content/0.log" Dec 02 10:56:51 crc kubenswrapper[4685]: I1202 10:56:51.199461 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-utilities/0.log" Dec 02 10:56:51 crc kubenswrapper[4685]: I1202 10:56:51.270270 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/registry-server/0.log" Dec 02 10:56:51 crc kubenswrapper[4685]: I1202 10:56:51.321697 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-utilities/0.log" Dec 02 10:56:51 crc kubenswrapper[4685]: I1202 10:56:51.390048 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-content/0.log" Dec 02 10:56:51 crc kubenswrapper[4685]: I1202 10:56:51.497306 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-content/0.log" Dec 02 10:56:51 crc kubenswrapper[4685]: I1202 10:56:51.608311 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-utilities/0.log" Dec 02 10:56:51 crc kubenswrapper[4685]: I1202 10:56:51.672000 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-content/0.log" Dec 02 10:56:51 crc kubenswrapper[4685]: I1202 10:56:51.832441 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/registry-server/0.log" Dec 02 10:56:51 crc kubenswrapper[4685]: I1202 10:56:51.860378 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-hxbft_267f61a3-d674-4855-9f76-b5c7edb14ed1/marketplace-operator/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.040596 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-utilities/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.226487 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-content/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.261795 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-utilities/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.306698 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-content/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.507426 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-utilities/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.574033 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-content/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.676394 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/registry-server/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.691622 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-utilities/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.939900 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-content/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.968948 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-content/0.log" Dec 02 10:56:52 crc kubenswrapper[4685]: I1202 10:56:52.981146 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-utilities/0.log" Dec 02 10:56:53 crc kubenswrapper[4685]: I1202 10:56:53.106478 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-utilities/0.log" Dec 02 10:56:53 crc kubenswrapper[4685]: I1202 10:56:53.133599 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-content/0.log" Dec 02 10:56:53 crc kubenswrapper[4685]: I1202 10:56:53.651545 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/registry-server/0.log" Dec 02 10:58:12 crc kubenswrapper[4685]: I1202 10:58:12.148025 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:58:12 crc kubenswrapper[4685]: I1202 10:58:12.148685 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:58:42 crc kubenswrapper[4685]: I1202 10:58:42.148189 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:58:42 crc kubenswrapper[4685]: I1202 10:58:42.148791 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:58:42 crc kubenswrapper[4685]: I1202 10:58:42.430313 4685 generic.go:334] "Generic (PLEG): container finished" podID="fdccfbcc-d473-4935-ad48-669a64b86c9f" containerID="1b8a675a11c49bb220540232d1c7addea1c8fdd8c53d3b0dce79658a8e85edb0" exitCode=0 Dec 02 10:58:42 crc kubenswrapper[4685]: I1202 
10:58:42.430377 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wh8tx/must-gather-8tghj" event={"ID":"fdccfbcc-d473-4935-ad48-669a64b86c9f","Type":"ContainerDied","Data":"1b8a675a11c49bb220540232d1c7addea1c8fdd8c53d3b0dce79658a8e85edb0"} Dec 02 10:58:42 crc kubenswrapper[4685]: I1202 10:58:42.431474 4685 scope.go:117] "RemoveContainer" containerID="1b8a675a11c49bb220540232d1c7addea1c8fdd8c53d3b0dce79658a8e85edb0" Dec 02 10:58:42 crc kubenswrapper[4685]: I1202 10:58:42.864836 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wh8tx_must-gather-8tghj_fdccfbcc-d473-4935-ad48-669a64b86c9f/gather/0.log" Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.298156 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wh8tx/must-gather-8tghj"] Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.299081 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-wh8tx/must-gather-8tghj" podUID="fdccfbcc-d473-4935-ad48-669a64b86c9f" containerName="copy" containerID="cri-o://88602fb6255545abbb669ad09a1234a4b4b7a572f47c56eea61fba04e751db4a" gracePeriod=2 Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.309766 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wh8tx/must-gather-8tghj"] Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.515933 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wh8tx_must-gather-8tghj_fdccfbcc-d473-4935-ad48-669a64b86c9f/copy/0.log" Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.516437 4685 generic.go:334] "Generic (PLEG): container finished" podID="fdccfbcc-d473-4935-ad48-669a64b86c9f" containerID="88602fb6255545abbb669ad09a1234a4b4b7a572f47c56eea61fba04e751db4a" exitCode=143 Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.751089 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wh8tx_must-gather-8tghj_fdccfbcc-d473-4935-ad48-669a64b86c9f/copy/0.log" Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.751611 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wh8tx/must-gather-8tghj" Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.823785 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/fdccfbcc-d473-4935-ad48-669a64b86c9f-must-gather-output\") pod \"fdccfbcc-d473-4935-ad48-669a64b86c9f\" (UID: \"fdccfbcc-d473-4935-ad48-669a64b86c9f\") " Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.823834 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs7vs\" (UniqueName: \"kubernetes.io/projected/fdccfbcc-d473-4935-ad48-669a64b86c9f-kube-api-access-rs7vs\") pod \"fdccfbcc-d473-4935-ad48-669a64b86c9f\" (UID: \"fdccfbcc-d473-4935-ad48-669a64b86c9f\") " Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.845498 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdccfbcc-d473-4935-ad48-669a64b86c9f-kube-api-access-rs7vs" (OuterVolumeSpecName: "kube-api-access-rs7vs") pod "fdccfbcc-d473-4935-ad48-669a64b86c9f" (UID: "fdccfbcc-d473-4935-ad48-669a64b86c9f"). InnerVolumeSpecName "kube-api-access-rs7vs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 10:58:51 crc kubenswrapper[4685]: I1202 10:58:51.926020 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs7vs\" (UniqueName: \"kubernetes.io/projected/fdccfbcc-d473-4935-ad48-669a64b86c9f-kube-api-access-rs7vs\") on node \"crc\" DevicePath \"\"" Dec 02 10:58:52 crc kubenswrapper[4685]: I1202 10:58:52.004233 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdccfbcc-d473-4935-ad48-669a64b86c9f-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "fdccfbcc-d473-4935-ad48-669a64b86c9f" (UID: "fdccfbcc-d473-4935-ad48-669a64b86c9f"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 10:58:52 crc kubenswrapper[4685]: I1202 10:58:52.027517 4685 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/fdccfbcc-d473-4935-ad48-669a64b86c9f-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 02 10:58:52 crc kubenswrapper[4685]: I1202 10:58:52.529086 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wh8tx_must-gather-8tghj_fdccfbcc-d473-4935-ad48-669a64b86c9f/copy/0.log" Dec 02 10:58:52 crc kubenswrapper[4685]: I1202 10:58:52.529531 4685 scope.go:117] "RemoveContainer" containerID="88602fb6255545abbb669ad09a1234a4b4b7a572f47c56eea61fba04e751db4a" Dec 02 10:58:52 crc kubenswrapper[4685]: I1202 10:58:52.529620 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wh8tx/must-gather-8tghj" Dec 02 10:58:52 crc kubenswrapper[4685]: I1202 10:58:52.560656 4685 scope.go:117] "RemoveContainer" containerID="1b8a675a11c49bb220540232d1c7addea1c8fdd8c53d3b0dce79658a8e85edb0" Dec 02 10:58:53 crc kubenswrapper[4685]: I1202 10:58:53.911711 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdccfbcc-d473-4935-ad48-669a64b86c9f" path="/var/lib/kubelet/pods/fdccfbcc-d473-4935-ad48-669a64b86c9f/volumes" Dec 02 10:59:12 crc kubenswrapper[4685]: I1202 10:59:12.147810 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 10:59:12 crc kubenswrapper[4685]: I1202 10:59:12.148397 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 10:59:12 crc kubenswrapper[4685]: I1202 10:59:12.148450 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 10:59:12 crc kubenswrapper[4685]: I1202 10:59:12.149244 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"20cc17afc70d7c15755ebf3185cf6db25f125900540259ba4fbfe1ecb29d6d64"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 10:59:12 crc kubenswrapper[4685]: I1202 10:59:12.149310 4685 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://20cc17afc70d7c15755ebf3185cf6db25f125900540259ba4fbfe1ecb29d6d64" gracePeriod=600 Dec 02 10:59:12 crc kubenswrapper[4685]: I1202 10:59:12.742747 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="20cc17afc70d7c15755ebf3185cf6db25f125900540259ba4fbfe1ecb29d6d64" exitCode=0 Dec 02 10:59:12 crc kubenswrapper[4685]: I1202 10:59:12.742820 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"20cc17afc70d7c15755ebf3185cf6db25f125900540259ba4fbfe1ecb29d6d64"} Dec 02 10:59:12 crc kubenswrapper[4685]: I1202 10:59:12.743235 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece"} Dec 02 10:59:12 crc kubenswrapper[4685]: I1202 10:59:12.743267 4685 scope.go:117] "RemoveContainer" containerID="5ae96ba04042884e5d20cbfee436f96fde3e30e4d98832d8a49443e2b13eb990" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.162519 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9"] Dec 02 11:00:00 crc kubenswrapper[4685]: E1202 11:00:00.165101 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerName="extract-utilities" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.165243 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerName="extract-utilities" Dec 02 11:00:00 crc kubenswrapper[4685]: E1202 11:00:00.165343 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerName="registry-server" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.165422 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerName="registry-server" Dec 02 11:00:00 crc kubenswrapper[4685]: E1202 11:00:00.165516 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdccfbcc-d473-4935-ad48-669a64b86c9f" containerName="gather" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.165619 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdccfbcc-d473-4935-ad48-669a64b86c9f" containerName="gather" Dec 02 11:00:00 crc kubenswrapper[4685]: E1202 11:00:00.165721 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerName="extract-content" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.165798 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerName="extract-content" Dec 02 11:00:00 crc kubenswrapper[4685]: E1202 11:00:00.165889 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdccfbcc-d473-4935-ad48-669a64b86c9f" containerName="copy" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.165964 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdccfbcc-d473-4935-ad48-669a64b86c9f" 
containerName="copy" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.169697 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="b253d13b-6c7e-4b7f-a032-e27232ae9b1a" containerName="registry-server" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.170010 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdccfbcc-d473-4935-ad48-669a64b86c9f" containerName="gather" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.170124 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdccfbcc-d473-4935-ad48-669a64b86c9f" containerName="copy" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.171102 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.176066 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9"] Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.181808 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.182035 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.255692 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gktwh\" (UniqueName: \"kubernetes.io/projected/5770a9a5-3c04-4ede-9212-989f276fa53c-kube-api-access-gktwh\") pod \"collect-profiles-29411220-sz6r9\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.255857 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5770a9a5-3c04-4ede-9212-989f276fa53c-secret-volume\") pod \"collect-profiles-29411220-sz6r9\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.255881 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5770a9a5-3c04-4ede-9212-989f276fa53c-config-volume\") pod \"collect-profiles-29411220-sz6r9\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.357805 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gktwh\" (UniqueName: \"kubernetes.io/projected/5770a9a5-3c04-4ede-9212-989f276fa53c-kube-api-access-gktwh\") pod \"collect-profiles-29411220-sz6r9\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.357970 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5770a9a5-3c04-4ede-9212-989f276fa53c-secret-volume\") pod \"collect-profiles-29411220-sz6r9\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.357996 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5770a9a5-3c04-4ede-9212-989f276fa53c-config-volume\") pod \"collect-profiles-29411220-sz6r9\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.359041 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5770a9a5-3c04-4ede-9212-989f276fa53c-config-volume\") pod \"collect-profiles-29411220-sz6r9\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.376585 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5770a9a5-3c04-4ede-9212-989f276fa53c-secret-volume\") pod \"collect-profiles-29411220-sz6r9\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.384712 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gktwh\" (UniqueName: \"kubernetes.io/projected/5770a9a5-3c04-4ede-9212-989f276fa53c-kube-api-access-gktwh\") pod \"collect-profiles-29411220-sz6r9\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.502953 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:00 crc kubenswrapper[4685]: I1202 11:00:00.987885 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9"] Dec 02 11:00:01 crc kubenswrapper[4685]: I1202 11:00:01.252272 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" event={"ID":"5770a9a5-3c04-4ede-9212-989f276fa53c","Type":"ContainerStarted","Data":"c884780f7f264ba5678a4ea7276d896d42a8f801f84b21876f41fd0c28151d08"} Dec 02 11:00:01 crc kubenswrapper[4685]: I1202 11:00:01.252615 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" event={"ID":"5770a9a5-3c04-4ede-9212-989f276fa53c","Type":"ContainerStarted","Data":"2fee52fad816b439e650cc7161a9008cc7ea082a74895aadb4a7519e4e022d45"} Dec 02 11:00:01 crc kubenswrapper[4685]: I1202 11:00:01.276525 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" podStartSLOduration=1.276509142 podStartE2EDuration="1.276509142s" podCreationTimestamp="2025-12-02 11:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:00:01.275291209 +0000 UTC m=+3493.647065373" watchObservedRunningTime="2025-12-02 11:00:01.276509142 +0000 UTC m=+3493.648283296" Dec 02 11:00:02 crc kubenswrapper[4685]: I1202 11:00:02.265721 4685 generic.go:334] "Generic (PLEG): container finished" podID="5770a9a5-3c04-4ede-9212-989f276fa53c" containerID="c884780f7f264ba5678a4ea7276d896d42a8f801f84b21876f41fd0c28151d08" exitCode=0 Dec 02 11:00:02 crc kubenswrapper[4685]: I1202 11:00:02.265779 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" event={"ID":"5770a9a5-3c04-4ede-9212-989f276fa53c","Type":"ContainerDied","Data":"c884780f7f264ba5678a4ea7276d896d42a8f801f84b21876f41fd0c28151d08"} Dec 02 11:00:03 crc kubenswrapper[4685]: I1202 11:00:03.669355 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:03 crc kubenswrapper[4685]: I1202 11:00:03.820297 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gktwh\" (UniqueName: \"kubernetes.io/projected/5770a9a5-3c04-4ede-9212-989f276fa53c-kube-api-access-gktwh\") pod \"5770a9a5-3c04-4ede-9212-989f276fa53c\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " Dec 02 11:00:03 crc kubenswrapper[4685]: I1202 11:00:03.820467 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5770a9a5-3c04-4ede-9212-989f276fa53c-config-volume\") pod \"5770a9a5-3c04-4ede-9212-989f276fa53c\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " Dec 02 11:00:03 crc kubenswrapper[4685]: I1202 11:00:03.820510 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5770a9a5-3c04-4ede-9212-989f276fa53c-secret-volume\") pod \"5770a9a5-3c04-4ede-9212-989f276fa53c\" (UID: \"5770a9a5-3c04-4ede-9212-989f276fa53c\") " Dec 02 11:00:03 crc kubenswrapper[4685]: I1202 11:00:03.821581 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5770a9a5-3c04-4ede-9212-989f276fa53c-config-volume" (OuterVolumeSpecName: "config-volume") pod "5770a9a5-3c04-4ede-9212-989f276fa53c" (UID: "5770a9a5-3c04-4ede-9212-989f276fa53c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 02 11:00:03 crc kubenswrapper[4685]: I1202 11:00:03.822002 4685 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5770a9a5-3c04-4ede-9212-989f276fa53c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 02 11:00:03 crc kubenswrapper[4685]: I1202 11:00:03.827079 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5770a9a5-3c04-4ede-9212-989f276fa53c-kube-api-access-gktwh" (OuterVolumeSpecName: "kube-api-access-gktwh") pod "5770a9a5-3c04-4ede-9212-989f276fa53c" (UID: "5770a9a5-3c04-4ede-9212-989f276fa53c"). InnerVolumeSpecName "kube-api-access-gktwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:00:03 crc kubenswrapper[4685]: I1202 11:00:03.828342 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5770a9a5-3c04-4ede-9212-989f276fa53c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5770a9a5-3c04-4ede-9212-989f276fa53c" (UID: "5770a9a5-3c04-4ede-9212-989f276fa53c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:00:03 crc kubenswrapper[4685]: I1202 11:00:03.923912 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gktwh\" (UniqueName: \"kubernetes.io/projected/5770a9a5-3c04-4ede-9212-989f276fa53c-kube-api-access-gktwh\") on node \"crc\" DevicePath \"\"" Dec 02 11:00:03 crc kubenswrapper[4685]: I1202 11:00:03.923949 4685 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5770a9a5-3c04-4ede-9212-989f276fa53c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 02 11:00:04 crc kubenswrapper[4685]: I1202 11:00:04.291023 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" event={"ID":"5770a9a5-3c04-4ede-9212-989f276fa53c","Type":"ContainerDied","Data":"2fee52fad816b439e650cc7161a9008cc7ea082a74895aadb4a7519e4e022d45"} Dec 02 11:00:04 crc kubenswrapper[4685]: I1202 11:00:04.291092 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29411220-sz6r9" Dec 02 11:00:04 crc kubenswrapper[4685]: I1202 11:00:04.291124 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fee52fad816b439e650cc7161a9008cc7ea082a74895aadb4a7519e4e022d45" Dec 02 11:00:04 crc kubenswrapper[4685]: I1202 11:00:04.389270 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf"] Dec 02 11:00:04 crc kubenswrapper[4685]: I1202 11:00:04.400733 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29411175-k45vf"] Dec 02 11:00:05 crc kubenswrapper[4685]: I1202 11:00:05.915492 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87325cd0-927f-4761-a3db-1640fe3060c7" path="/var/lib/kubelet/pods/87325cd0-927f-4761-a3db-1640fe3060c7/volumes" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.391279 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-74vn9"] Dec 02 11:00:30 crc kubenswrapper[4685]: E1202 11:00:30.392512 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5770a9a5-3c04-4ede-9212-989f276fa53c" containerName="collect-profiles" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.392534 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="5770a9a5-3c04-4ede-9212-989f276fa53c" containerName="collect-profiles" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.392938 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="5770a9a5-3c04-4ede-9212-989f276fa53c" containerName="collect-profiles" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.395296 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.411266 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-74vn9"] Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.445391 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-catalog-content\") pod \"community-operators-74vn9\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.445466 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhcqp\" (UniqueName: \"kubernetes.io/projected/92ebed65-5895-429c-abb7-eeefca5866bd-kube-api-access-xhcqp\") pod \"community-operators-74vn9\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.445515 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-utilities\") pod \"community-operators-74vn9\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.547625 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-catalog-content\") pod \"community-operators-74vn9\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.547702 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhcqp\" (UniqueName: \"kubernetes.io/projected/92ebed65-5895-429c-abb7-eeefca5866bd-kube-api-access-xhcqp\") pod \"community-operators-74vn9\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.547750 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-utilities\") pod \"community-operators-74vn9\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.548437 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-catalog-content\") pod \"community-operators-74vn9\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.548607 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-utilities\") pod \"community-operators-74vn9\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.577955 4685 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7gq"] Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.581161 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.587476 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhcqp\" (UniqueName: \"kubernetes.io/projected/92ebed65-5895-429c-abb7-eeefca5866bd-kube-api-access-xhcqp\") pod \"community-operators-74vn9\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.589795 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7gq"] Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.653934 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-utilities\") pod \"redhat-marketplace-lf7gq\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.654076 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drhq9\" (UniqueName: \"kubernetes.io/projected/32e35c39-078d-476b-888d-211e55d2209f-kube-api-access-drhq9\") pod \"redhat-marketplace-lf7gq\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.656046 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-catalog-content\") pod \"redhat-marketplace-lf7gq\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.720342 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.757817 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-utilities\") pod \"redhat-marketplace-lf7gq\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.758087 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drhq9\" (UniqueName: \"kubernetes.io/projected/32e35c39-078d-476b-888d-211e55d2209f-kube-api-access-drhq9\") pod \"redhat-marketplace-lf7gq\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.758107 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-catalog-content\") pod \"redhat-marketplace-lf7gq\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.758548 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-catalog-content\") pod \"redhat-marketplace-lf7gq\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.758931 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-utilities\") pod \"redhat-marketplace-lf7gq\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.780238 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drhq9\" (UniqueName: \"kubernetes.io/projected/32e35c39-078d-476b-888d-211e55d2209f-kube-api-access-drhq9\") pod \"redhat-marketplace-lf7gq\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:30 crc kubenswrapper[4685]: I1202 11:00:30.956058 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:31 crc kubenswrapper[4685]: I1202 11:00:31.211086 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-74vn9"] Dec 02 11:00:31 crc kubenswrapper[4685]: I1202 11:00:31.431279 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7gq"] Dec 02 11:00:31 crc kubenswrapper[4685]: I1202 11:00:31.656050 4685 generic.go:334] "Generic (PLEG): container finished" podID="92ebed65-5895-429c-abb7-eeefca5866bd" containerID="49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195" exitCode=0 Dec 02 11:00:31 crc kubenswrapper[4685]: I1202 11:00:31.656098 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74vn9" event={"ID":"92ebed65-5895-429c-abb7-eeefca5866bd","Type":"ContainerDied","Data":"49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195"} Dec 02 11:00:31 crc kubenswrapper[4685]: I1202 11:00:31.656403 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74vn9" event={"ID":"92ebed65-5895-429c-abb7-eeefca5866bd","Type":"ContainerStarted","Data":"dae10191b11d4d103722a2737dd6a652a926d466d42270dab37daa78406e7f94"} Dec 02 11:00:31 crc kubenswrapper[4685]: I1202 11:00:31.657814 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 11:00:31 crc kubenswrapper[4685]: I1202 11:00:31.659388 4685 generic.go:334] "Generic (PLEG): container finished" podID="32e35c39-078d-476b-888d-211e55d2209f" containerID="3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c" exitCode=0 Dec 02 11:00:31 crc kubenswrapper[4685]: I1202 11:00:31.659413 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7gq" event={"ID":"32e35c39-078d-476b-888d-211e55d2209f","Type":"ContainerDied","Data":"3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c"} Dec 02 11:00:31 crc kubenswrapper[4685]: I1202 11:00:31.659430 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7gq" event={"ID":"32e35c39-078d-476b-888d-211e55d2209f","Type":"ContainerStarted","Data":"4ea60e5c33e0b9036c15d1af8eb3811474ec1bd21855221e746ec9f4c58c7c37"} Dec 02 11:00:33 crc kubenswrapper[4685]: I1202 11:00:33.680138 4685 generic.go:334] "Generic (PLEG): container finished" podID="32e35c39-078d-476b-888d-211e55d2209f" containerID="3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993" exitCode=0 Dec 02 11:00:33 crc kubenswrapper[4685]: I1202 11:00:33.680208 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7gq" event={"ID":"32e35c39-078d-476b-888d-211e55d2209f","Type":"ContainerDied","Data":"3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993"} Dec 02 11:00:33 crc kubenswrapper[4685]: I1202 11:00:33.683222 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74vn9" event={"ID":"92ebed65-5895-429c-abb7-eeefca5866bd","Type":"ContainerStarted","Data":"4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9"} Dec 02 11:00:34 crc kubenswrapper[4685]: I1202 11:00:34.696057 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7gq" 
event={"ID":"32e35c39-078d-476b-888d-211e55d2209f","Type":"ContainerStarted","Data":"df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7"} Dec 02 11:00:34 crc kubenswrapper[4685]: I1202 11:00:34.698159 4685 generic.go:334] "Generic (PLEG): container finished" podID="92ebed65-5895-429c-abb7-eeefca5866bd" containerID="4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9" exitCode=0 Dec 02 11:00:34 crc kubenswrapper[4685]: I1202 11:00:34.698198 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74vn9" event={"ID":"92ebed65-5895-429c-abb7-eeefca5866bd","Type":"ContainerDied","Data":"4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9"} Dec 02 11:00:34 crc kubenswrapper[4685]: I1202 11:00:34.716274 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lf7gq" podStartSLOduration=2.117505268 podStartE2EDuration="4.716260228s" podCreationTimestamp="2025-12-02 11:00:30 +0000 UTC" firstStartedPulling="2025-12-02 11:00:31.66073716 +0000 UTC m=+3524.032511314" lastFinishedPulling="2025-12-02 11:00:34.25949212 +0000 UTC m=+3526.631266274" observedRunningTime="2025-12-02 11:00:34.710798459 +0000 UTC m=+3527.082572613" watchObservedRunningTime="2025-12-02 11:00:34.716260228 +0000 UTC m=+3527.088034382" Dec 02 11:00:35 crc kubenswrapper[4685]: I1202 11:00:35.709170 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74vn9" event={"ID":"92ebed65-5895-429c-abb7-eeefca5866bd","Type":"ContainerStarted","Data":"5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8"} Dec 02 11:00:35 crc kubenswrapper[4685]: I1202 11:00:35.732521 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-74vn9" podStartSLOduration=2.011158406 podStartE2EDuration="5.732502878s" podCreationTimestamp="2025-12-02 11:00:30 +0000 UTC" firstStartedPulling="2025-12-02 11:00:31.657555434 +0000 UTC m=+3524.029329588" lastFinishedPulling="2025-12-02 11:00:35.378899866 +0000 UTC m=+3527.750674060" observedRunningTime="2025-12-02 11:00:35.726831183 +0000 UTC m=+3528.098605337" watchObservedRunningTime="2025-12-02 11:00:35.732502878 +0000 UTC m=+3528.104277032" Dec 02 11:00:40 crc kubenswrapper[4685]: I1202 11:00:40.721873 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:40 crc kubenswrapper[4685]: I1202 11:00:40.722494 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:40 crc kubenswrapper[4685]: I1202 11:00:40.786303 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:40 crc kubenswrapper[4685]: I1202 11:00:40.871743 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:40 crc kubenswrapper[4685]: I1202 11:00:40.959782 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:40 crc kubenswrapper[4685]: I1202 11:00:40.959828 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:41 crc kubenswrapper[4685]: I1202 11:00:41.014525 4685 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:41 crc kubenswrapper[4685]: I1202 11:00:41.035702 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-74vn9"] Dec 02 11:00:41 crc kubenswrapper[4685]: I1202 11:00:41.838036 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:42 crc kubenswrapper[4685]: I1202 11:00:42.798319 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-74vn9" podUID="92ebed65-5895-429c-abb7-eeefca5866bd" containerName="registry-server" containerID="cri-o://5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8" gracePeriod=2 Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.300482 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.407356 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhcqp\" (UniqueName: \"kubernetes.io/projected/92ebed65-5895-429c-abb7-eeefca5866bd-kube-api-access-xhcqp\") pod \"92ebed65-5895-429c-abb7-eeefca5866bd\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.407472 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-utilities\") pod \"92ebed65-5895-429c-abb7-eeefca5866bd\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.407637 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-catalog-content\") pod \"92ebed65-5895-429c-abb7-eeefca5866bd\" (UID: \"92ebed65-5895-429c-abb7-eeefca5866bd\") " Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.408283 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-utilities" (OuterVolumeSpecName: "utilities") pod "92ebed65-5895-429c-abb7-eeefca5866bd" (UID: "92ebed65-5895-429c-abb7-eeefca5866bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.419171 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92ebed65-5895-429c-abb7-eeefca5866bd-kube-api-access-xhcqp" (OuterVolumeSpecName: "kube-api-access-xhcqp") pod "92ebed65-5895-429c-abb7-eeefca5866bd" (UID: "92ebed65-5895-429c-abb7-eeefca5866bd"). InnerVolumeSpecName "kube-api-access-xhcqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.439786 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7gq"] Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.463777 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "92ebed65-5895-429c-abb7-eeefca5866bd" (UID: "92ebed65-5895-429c-abb7-eeefca5866bd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.510468 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhcqp\" (UniqueName: \"kubernetes.io/projected/92ebed65-5895-429c-abb7-eeefca5866bd-kube-api-access-xhcqp\") on node \"crc\" DevicePath \"\"" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.510520 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.510541 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/92ebed65-5895-429c-abb7-eeefca5866bd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.814145 4685 generic.go:334] "Generic (PLEG): container finished" podID="92ebed65-5895-429c-abb7-eeefca5866bd" containerID="5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8" exitCode=0 Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.814220 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-74vn9" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.814233 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74vn9" event={"ID":"92ebed65-5895-429c-abb7-eeefca5866bd","Type":"ContainerDied","Data":"5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8"} Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.814344 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74vn9" event={"ID":"92ebed65-5895-429c-abb7-eeefca5866bd","Type":"ContainerDied","Data":"dae10191b11d4d103722a2737dd6a652a926d466d42270dab37daa78406e7f94"} Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.814384 4685 scope.go:117] "RemoveContainer" containerID="5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.814664 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lf7gq" podUID="32e35c39-078d-476b-888d-211e55d2209f" containerName="registry-server" containerID="cri-o://df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7" gracePeriod=2 Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.858847 4685 scope.go:117] "RemoveContainer" containerID="4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.870586 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-74vn9"] Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.885416 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-74vn9"] Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.886958 4685 scope.go:117] "RemoveContainer" containerID="49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195" Dec 02 11:00:43 crc kubenswrapper[4685]: I1202 11:00:43.916581 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92ebed65-5895-429c-abb7-eeefca5866bd" path="/var/lib/kubelet/pods/92ebed65-5895-429c-abb7-eeefca5866bd/volumes" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.072088 4685 scope.go:117] 
"RemoveContainer" containerID="5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8" Dec 02 11:00:44 crc kubenswrapper[4685]: E1202 11:00:44.072497 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8\": container with ID starting with 5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8 not found: ID does not exist" containerID="5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.072529 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8"} err="failed to get container status \"5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8\": rpc error: code = NotFound desc = could not find container \"5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8\": container with ID starting with 5dd4042f805df7055621d80eb0f2949bd05a4f429b766a14eb2e52a5bcac05f8 not found: ID does not exist" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.072549 4685 scope.go:117] "RemoveContainer" containerID="4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9" Dec 02 11:00:44 crc kubenswrapper[4685]: E1202 11:00:44.074093 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9\": container with ID starting with 4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9 not found: ID does not exist" containerID="4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.074211 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9"} err="failed to get container status \"4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9\": rpc error: code = NotFound desc = could not find container \"4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9\": container with ID starting with 4217a873afc6b56c1104467af254878f1b26405a9684a60ab2c425277968e6d9 not found: ID does not exist" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.074297 4685 scope.go:117] "RemoveContainer" containerID="49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195" Dec 02 11:00:44 crc kubenswrapper[4685]: E1202 11:00:44.074640 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195\": container with ID starting with 49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195 not found: ID does not exist" containerID="49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.074667 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195"} err="failed to get container status \"49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195\": rpc error: code = NotFound desc = could not find container \"49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195\": container with ID starting with 
49481801baea5b2cd1664a0bf680b4802fe9ca44c95424ac9058acb510dd3195 not found: ID does not exist" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.789357 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.829461 4685 generic.go:334] "Generic (PLEG): container finished" podID="32e35c39-078d-476b-888d-211e55d2209f" containerID="df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7" exitCode=0 Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.829491 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7gq" event={"ID":"32e35c39-078d-476b-888d-211e55d2209f","Type":"ContainerDied","Data":"df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7"} Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.829527 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lf7gq" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.829544 4685 scope.go:117] "RemoveContainer" containerID="df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.829519 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lf7gq" event={"ID":"32e35c39-078d-476b-888d-211e55d2209f","Type":"ContainerDied","Data":"4ea60e5c33e0b9036c15d1af8eb3811474ec1bd21855221e746ec9f4c58c7c37"} Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.831415 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drhq9\" (UniqueName: \"kubernetes.io/projected/32e35c39-078d-476b-888d-211e55d2209f-kube-api-access-drhq9\") pod \"32e35c39-078d-476b-888d-211e55d2209f\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.831675 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-utilities\") pod \"32e35c39-078d-476b-888d-211e55d2209f\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.831767 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-catalog-content\") pod \"32e35c39-078d-476b-888d-211e55d2209f\" (UID: \"32e35c39-078d-476b-888d-211e55d2209f\") " Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.833024 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-utilities" (OuterVolumeSpecName: "utilities") pod "32e35c39-078d-476b-888d-211e55d2209f" (UID: "32e35c39-078d-476b-888d-211e55d2209f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.837102 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32e35c39-078d-476b-888d-211e55d2209f-kube-api-access-drhq9" (OuterVolumeSpecName: "kube-api-access-drhq9") pod "32e35c39-078d-476b-888d-211e55d2209f" (UID: "32e35c39-078d-476b-888d-211e55d2209f"). InnerVolumeSpecName "kube-api-access-drhq9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.847683 4685 scope.go:117] "RemoveContainer" containerID="3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.879055 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "32e35c39-078d-476b-888d-211e55d2209f" (UID: "32e35c39-078d-476b-888d-211e55d2209f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.909160 4685 scope.go:117] "RemoveContainer" containerID="3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.923596 4685 scope.go:117] "RemoveContainer" containerID="df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7" Dec 02 11:00:44 crc kubenswrapper[4685]: E1202 11:00:44.923967 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7\": container with ID starting with df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7 not found: ID does not exist" containerID="df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.923998 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7"} err="failed to get container status \"df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7\": rpc error: code = NotFound desc = could not find container \"df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7\": container with ID starting with df7497e1d0aa1042363649025b1edcb49bba53a8ac41a86ef82db17a27ad48d7 not found: ID does not exist" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.924023 4685 scope.go:117] "RemoveContainer" containerID="3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993" Dec 02 11:00:44 crc kubenswrapper[4685]: E1202 11:00:44.924235 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993\": container with ID starting with 3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993 not found: ID does not exist" containerID="3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.924269 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993"} err="failed to get container status \"3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993\": rpc error: code = NotFound desc = could not find container \"3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993\": container with ID starting with 3432c070c7ae512e5d07198ead7e146862f9e815b1f475959d52fc81ed1cf993 not found: ID does not exist" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.924287 4685 scope.go:117] "RemoveContainer" containerID="3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c" Dec 02 11:00:44 crc kubenswrapper[4685]: 
E1202 11:00:44.924528 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c\": container with ID starting with 3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c not found: ID does not exist" containerID="3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.924550 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c"} err="failed to get container status \"3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c\": rpc error: code = NotFound desc = could not find container \"3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c\": container with ID starting with 3cd76c2985d7bd46f8d0e91e4a4e0c34d0035fb61ef55307f0946b4a8f82795c not found: ID does not exist" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.933385 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.933408 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e35c39-078d-476b-888d-211e55d2209f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:00:44 crc kubenswrapper[4685]: I1202 11:00:44.933422 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drhq9\" (UniqueName: \"kubernetes.io/projected/32e35c39-078d-476b-888d-211e55d2209f-kube-api-access-drhq9\") on node \"crc\" DevicePath \"\"" Dec 02 11:00:45 crc kubenswrapper[4685]: I1202 11:00:45.164392 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7gq"] Dec 02 11:00:45 crc kubenswrapper[4685]: I1202 11:00:45.173007 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lf7gq"] Dec 02 11:00:45 crc kubenswrapper[4685]: I1202 11:00:45.915365 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32e35c39-078d-476b-888d-211e55d2209f" path="/var/lib/kubelet/pods/32e35c39-078d-476b-888d-211e55d2209f/volumes" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.169261 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29411221-xnlhc"] Dec 02 11:01:00 crc kubenswrapper[4685]: E1202 11:01:00.170283 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92ebed65-5895-429c-abb7-eeefca5866bd" containerName="extract-utilities" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.170300 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="92ebed65-5895-429c-abb7-eeefca5866bd" containerName="extract-utilities" Dec 02 11:01:00 crc kubenswrapper[4685]: E1202 11:01:00.170320 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92ebed65-5895-429c-abb7-eeefca5866bd" containerName="extract-content" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.170329 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="92ebed65-5895-429c-abb7-eeefca5866bd" containerName="extract-content" Dec 02 11:01:00 crc kubenswrapper[4685]: E1202 11:01:00.170368 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e35c39-078d-476b-888d-211e55d2209f" 
containerName="extract-content" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.170377 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e35c39-078d-476b-888d-211e55d2209f" containerName="extract-content" Dec 02 11:01:00 crc kubenswrapper[4685]: E1202 11:01:00.170402 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e35c39-078d-476b-888d-211e55d2209f" containerName="extract-utilities" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.170411 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e35c39-078d-476b-888d-211e55d2209f" containerName="extract-utilities" Dec 02 11:01:00 crc kubenswrapper[4685]: E1202 11:01:00.170429 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e35c39-078d-476b-888d-211e55d2209f" containerName="registry-server" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.170437 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e35c39-078d-476b-888d-211e55d2209f" containerName="registry-server" Dec 02 11:01:00 crc kubenswrapper[4685]: E1202 11:01:00.170462 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92ebed65-5895-429c-abb7-eeefca5866bd" containerName="registry-server" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.170470 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="92ebed65-5895-429c-abb7-eeefca5866bd" containerName="registry-server" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.170707 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="92ebed65-5895-429c-abb7-eeefca5866bd" containerName="registry-server" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.170729 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e35c39-078d-476b-888d-211e55d2209f" containerName="registry-server" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.171504 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.199926 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29411221-xnlhc"] Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.243272 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slwv9\" (UniqueName: \"kubernetes.io/projected/e8058fba-9754-4c55-b7fc-85ec08c67c99-kube-api-access-slwv9\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.243866 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-fernet-keys\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.244132 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-config-data\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.244324 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-combined-ca-bundle\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.345113 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-config-data\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.345198 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-combined-ca-bundle\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.345237 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slwv9\" (UniqueName: \"kubernetes.io/projected/e8058fba-9754-4c55-b7fc-85ec08c67c99-kube-api-access-slwv9\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.345280 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-fernet-keys\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.352983 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-config-data\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.355379 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-combined-ca-bundle\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.362186 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-fernet-keys\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.366119 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slwv9\" (UniqueName: \"kubernetes.io/projected/e8058fba-9754-4c55-b7fc-85ec08c67c99-kube-api-access-slwv9\") pod \"keystone-cron-29411221-xnlhc\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.491822 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.962108 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29411221-xnlhc"] Dec 02 11:01:00 crc kubenswrapper[4685]: I1202 11:01:00.983524 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411221-xnlhc" event={"ID":"e8058fba-9754-4c55-b7fc-85ec08c67c99","Type":"ContainerStarted","Data":"f3b400a5d4526080afd87ca4bb10dbf927f3d680a40256dcaa161d96549289c7"} Dec 02 11:01:02 crc kubenswrapper[4685]: I1202 11:01:02.004024 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411221-xnlhc" event={"ID":"e8058fba-9754-4c55-b7fc-85ec08c67c99","Type":"ContainerStarted","Data":"f71b73b222f025f7bddf9bd1d9d20f75a1081b112c3492330824de071558a70a"} Dec 02 11:01:02 crc kubenswrapper[4685]: I1202 11:01:02.035375 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29411221-xnlhc" podStartSLOduration=2.035349745 podStartE2EDuration="2.035349745s" podCreationTimestamp="2025-12-02 11:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:01:02.02638928 +0000 UTC m=+3554.398163454" watchObservedRunningTime="2025-12-02 11:01:02.035349745 +0000 UTC m=+3554.407123909" Dec 02 11:01:04 crc kubenswrapper[4685]: I1202 11:01:04.027674 4685 generic.go:334] "Generic (PLEG): container finished" podID="e8058fba-9754-4c55-b7fc-85ec08c67c99" containerID="f71b73b222f025f7bddf9bd1d9d20f75a1081b112c3492330824de071558a70a" exitCode=0 Dec 02 11:01:04 crc kubenswrapper[4685]: I1202 11:01:04.027771 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411221-xnlhc" event={"ID":"e8058fba-9754-4c55-b7fc-85ec08c67c99","Type":"ContainerDied","Data":"f71b73b222f025f7bddf9bd1d9d20f75a1081b112c3492330824de071558a70a"} Dec 02 11:01:04 crc kubenswrapper[4685]: 
I1202 11:01:04.903311 4685 scope.go:117] "RemoveContainer" containerID="1973465ea9fccfd5a052956e335d2663c0b4c615a89d8330a849d73f4e8033a3" Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.365128 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.457482 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slwv9\" (UniqueName: \"kubernetes.io/projected/e8058fba-9754-4c55-b7fc-85ec08c67c99-kube-api-access-slwv9\") pod \"e8058fba-9754-4c55-b7fc-85ec08c67c99\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.457588 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-combined-ca-bundle\") pod \"e8058fba-9754-4c55-b7fc-85ec08c67c99\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.457618 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-config-data\") pod \"e8058fba-9754-4c55-b7fc-85ec08c67c99\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.457704 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-fernet-keys\") pod \"e8058fba-9754-4c55-b7fc-85ec08c67c99\" (UID: \"e8058fba-9754-4c55-b7fc-85ec08c67c99\") " Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.463662 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e8058fba-9754-4c55-b7fc-85ec08c67c99" (UID: "e8058fba-9754-4c55-b7fc-85ec08c67c99"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.466489 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8058fba-9754-4c55-b7fc-85ec08c67c99-kube-api-access-slwv9" (OuterVolumeSpecName: "kube-api-access-slwv9") pod "e8058fba-9754-4c55-b7fc-85ec08c67c99" (UID: "e8058fba-9754-4c55-b7fc-85ec08c67c99"). InnerVolumeSpecName "kube-api-access-slwv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.485334 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e8058fba-9754-4c55-b7fc-85ec08c67c99" (UID: "e8058fba-9754-4c55-b7fc-85ec08c67c99"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.506493 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-config-data" (OuterVolumeSpecName: "config-data") pod "e8058fba-9754-4c55-b7fc-85ec08c67c99" (UID: "e8058fba-9754-4c55-b7fc-85ec08c67c99"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.560062 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slwv9\" (UniqueName: \"kubernetes.io/projected/e8058fba-9754-4c55-b7fc-85ec08c67c99-kube-api-access-slwv9\") on node \"crc\" DevicePath \"\"" Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.560095 4685 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.560106 4685 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-config-data\") on node \"crc\" DevicePath \"\"" Dec 02 11:01:05 crc kubenswrapper[4685]: I1202 11:01:05.560116 4685 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e8058fba-9754-4c55-b7fc-85ec08c67c99-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 02 11:01:06 crc kubenswrapper[4685]: I1202 11:01:06.050858 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29411221-xnlhc" event={"ID":"e8058fba-9754-4c55-b7fc-85ec08c67c99","Type":"ContainerDied","Data":"f3b400a5d4526080afd87ca4bb10dbf927f3d680a40256dcaa161d96549289c7"} Dec 02 11:01:06 crc kubenswrapper[4685]: I1202 11:01:06.051175 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3b400a5d4526080afd87ca4bb10dbf927f3d680a40256dcaa161d96549289c7" Dec 02 11:01:06 crc kubenswrapper[4685]: I1202 11:01:06.050979 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29411221-xnlhc" Dec 02 11:01:12 crc kubenswrapper[4685]: I1202 11:01:12.147821 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 11:01:12 crc kubenswrapper[4685]: I1202 11:01:12.148923 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 11:01:42 crc kubenswrapper[4685]: I1202 11:01:42.147860 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 11:01:42 crc kubenswrapper[4685]: I1202 11:01:42.148413 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.102939 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-f7twl/must-gather-hds9f"] Dec 02 11:01:43 crc kubenswrapper[4685]: E1202 
11:01:43.103585 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8058fba-9754-4c55-b7fc-85ec08c67c99" containerName="keystone-cron" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.103601 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8058fba-9754-4c55-b7fc-85ec08c67c99" containerName="keystone-cron" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.103767 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8058fba-9754-4c55-b7fc-85ec08c67c99" containerName="keystone-cron" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.104704 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f7twl/must-gather-hds9f" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.108102 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-f7twl"/"openshift-service-ca.crt" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.108271 4685 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-f7twl"/"default-dockercfg-zqdzx" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.113598 4685 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-f7twl"/"kube-root-ca.crt" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.120202 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-f7twl/must-gather-hds9f"] Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.231665 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4a595559-df01-40d0-ad8b-e781bad99ce1-must-gather-output\") pod \"must-gather-hds9f\" (UID: \"4a595559-df01-40d0-ad8b-e781bad99ce1\") " pod="openshift-must-gather-f7twl/must-gather-hds9f" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.231762 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2px4g\" (UniqueName: \"kubernetes.io/projected/4a595559-df01-40d0-ad8b-e781bad99ce1-kube-api-access-2px4g\") pod \"must-gather-hds9f\" (UID: \"4a595559-df01-40d0-ad8b-e781bad99ce1\") " pod="openshift-must-gather-f7twl/must-gather-hds9f" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.333093 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2px4g\" (UniqueName: \"kubernetes.io/projected/4a595559-df01-40d0-ad8b-e781bad99ce1-kube-api-access-2px4g\") pod \"must-gather-hds9f\" (UID: \"4a595559-df01-40d0-ad8b-e781bad99ce1\") " pod="openshift-must-gather-f7twl/must-gather-hds9f" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.333220 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4a595559-df01-40d0-ad8b-e781bad99ce1-must-gather-output\") pod \"must-gather-hds9f\" (UID: \"4a595559-df01-40d0-ad8b-e781bad99ce1\") " pod="openshift-must-gather-f7twl/must-gather-hds9f" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.333642 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4a595559-df01-40d0-ad8b-e781bad99ce1-must-gather-output\") pod \"must-gather-hds9f\" (UID: \"4a595559-df01-40d0-ad8b-e781bad99ce1\") " pod="openshift-must-gather-f7twl/must-gather-hds9f" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.354324 4685 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2px4g\" (UniqueName: \"kubernetes.io/projected/4a595559-df01-40d0-ad8b-e781bad99ce1-kube-api-access-2px4g\") pod \"must-gather-hds9f\" (UID: \"4a595559-df01-40d0-ad8b-e781bad99ce1\") " pod="openshift-must-gather-f7twl/must-gather-hds9f" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.421750 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f7twl/must-gather-hds9f" Dec 02 11:01:43 crc kubenswrapper[4685]: I1202 11:01:43.968308 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-f7twl/must-gather-hds9f"] Dec 02 11:01:44 crc kubenswrapper[4685]: I1202 11:01:44.663919 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/must-gather-hds9f" event={"ID":"4a595559-df01-40d0-ad8b-e781bad99ce1","Type":"ContainerStarted","Data":"06d54a191e75a1f75ca94dc22023e8087ab425a1b8c367a246ad2a9fe21328cb"} Dec 02 11:01:44 crc kubenswrapper[4685]: I1202 11:01:44.664422 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/must-gather-hds9f" event={"ID":"4a595559-df01-40d0-ad8b-e781bad99ce1","Type":"ContainerStarted","Data":"b4cfe9510147f450c77603e18e072c253fe995482a4728b06ae02f6a6defc630"} Dec 02 11:01:44 crc kubenswrapper[4685]: I1202 11:01:44.664441 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/must-gather-hds9f" event={"ID":"4a595559-df01-40d0-ad8b-e781bad99ce1","Type":"ContainerStarted","Data":"447cc349bb9ce23df73c08d6cb929581709ea6cf3a389f29e50ae885ac5452a2"} Dec 02 11:01:44 crc kubenswrapper[4685]: I1202 11:01:44.681039 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-f7twl/must-gather-hds9f" podStartSLOduration=1.681019826 podStartE2EDuration="1.681019826s" podCreationTimestamp="2025-12-02 11:01:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:01:44.680409408 +0000 UTC m=+3597.052183562" watchObservedRunningTime="2025-12-02 11:01:44.681019826 +0000 UTC m=+3597.052793980" Dec 02 11:01:48 crc kubenswrapper[4685]: I1202 11:01:48.176744 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-f7twl/crc-debug-s6br6"] Dec 02 11:01:48 crc kubenswrapper[4685]: I1202 11:01:48.183423 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-s6br6" Dec 02 11:01:48 crc kubenswrapper[4685]: I1202 11:01:48.353730 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/409be931-5948-40bf-a525-6345925f3dc9-host\") pod \"crc-debug-s6br6\" (UID: \"409be931-5948-40bf-a525-6345925f3dc9\") " pod="openshift-must-gather-f7twl/crc-debug-s6br6" Dec 02 11:01:48 crc kubenswrapper[4685]: I1202 11:01:48.353868 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrzwl\" (UniqueName: \"kubernetes.io/projected/409be931-5948-40bf-a525-6345925f3dc9-kube-api-access-zrzwl\") pod \"crc-debug-s6br6\" (UID: \"409be931-5948-40bf-a525-6345925f3dc9\") " pod="openshift-must-gather-f7twl/crc-debug-s6br6" Dec 02 11:01:48 crc kubenswrapper[4685]: I1202 11:01:48.455686 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/409be931-5948-40bf-a525-6345925f3dc9-host\") pod \"crc-debug-s6br6\" (UID: \"409be931-5948-40bf-a525-6345925f3dc9\") " pod="openshift-must-gather-f7twl/crc-debug-s6br6" Dec 02 11:01:48 crc kubenswrapper[4685]: I1202 11:01:48.455838 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrzwl\" (UniqueName: \"kubernetes.io/projected/409be931-5948-40bf-a525-6345925f3dc9-kube-api-access-zrzwl\") pod \"crc-debug-s6br6\" (UID: \"409be931-5948-40bf-a525-6345925f3dc9\") " pod="openshift-must-gather-f7twl/crc-debug-s6br6" Dec 02 11:01:48 crc kubenswrapper[4685]: I1202 11:01:48.455837 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/409be931-5948-40bf-a525-6345925f3dc9-host\") pod \"crc-debug-s6br6\" (UID: \"409be931-5948-40bf-a525-6345925f3dc9\") " pod="openshift-must-gather-f7twl/crc-debug-s6br6" Dec 02 11:01:48 crc kubenswrapper[4685]: I1202 11:01:48.474404 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrzwl\" (UniqueName: \"kubernetes.io/projected/409be931-5948-40bf-a525-6345925f3dc9-kube-api-access-zrzwl\") pod \"crc-debug-s6br6\" (UID: \"409be931-5948-40bf-a525-6345925f3dc9\") " pod="openshift-must-gather-f7twl/crc-debug-s6br6" Dec 02 11:01:48 crc kubenswrapper[4685]: I1202 11:01:48.511479 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-s6br6" Dec 02 11:01:48 crc kubenswrapper[4685]: I1202 11:01:48.696875 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/crc-debug-s6br6" event={"ID":"409be931-5948-40bf-a525-6345925f3dc9","Type":"ContainerStarted","Data":"537bfdff4ec2b6f09f4dea43dd8841405a4e363d8bff64c614b0385198019a18"} Dec 02 11:01:49 crc kubenswrapper[4685]: I1202 11:01:49.709488 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/crc-debug-s6br6" event={"ID":"409be931-5948-40bf-a525-6345925f3dc9","Type":"ContainerStarted","Data":"48ebd2910ee69667f19a9bebb9aa075289c604cb535f11638290b840d2b5824f"} Dec 02 11:01:49 crc kubenswrapper[4685]: I1202 11:01:49.732094 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-f7twl/crc-debug-s6br6" podStartSLOduration=1.7320760160000002 podStartE2EDuration="1.732076016s" podCreationTimestamp="2025-12-02 11:01:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-02 11:01:49.724740015 +0000 UTC m=+3602.096514169" watchObservedRunningTime="2025-12-02 11:01:49.732076016 +0000 UTC m=+3602.103850190" Dec 02 11:02:12 crc kubenswrapper[4685]: I1202 11:02:12.147849 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 11:02:12 crc kubenswrapper[4685]: I1202 11:02:12.148387 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 11:02:12 crc kubenswrapper[4685]: I1202 11:02:12.148431 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 11:02:12 crc kubenswrapper[4685]: I1202 11:02:12.149202 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 11:02:12 crc kubenswrapper[4685]: I1202 11:02:12.149263 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" gracePeriod=600 Dec 02 11:02:12 crc kubenswrapper[4685]: E1202 11:02:12.281825 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" 
podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:02:12 crc kubenswrapper[4685]: I1202 11:02:12.934492 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" exitCode=0 Dec 02 11:02:12 crc kubenswrapper[4685]: I1202 11:02:12.934591 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece"} Dec 02 11:02:12 crc kubenswrapper[4685]: I1202 11:02:12.934786 4685 scope.go:117] "RemoveContainer" containerID="20cc17afc70d7c15755ebf3185cf6db25f125900540259ba4fbfe1ecb29d6d64" Dec 02 11:02:12 crc kubenswrapper[4685]: I1202 11:02:12.935522 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:02:12 crc kubenswrapper[4685]: E1202 11:02:12.935920 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:02:21 crc kubenswrapper[4685]: I1202 11:02:21.002621 4685 generic.go:334] "Generic (PLEG): container finished" podID="409be931-5948-40bf-a525-6345925f3dc9" containerID="48ebd2910ee69667f19a9bebb9aa075289c604cb535f11638290b840d2b5824f" exitCode=0 Dec 02 11:02:21 crc kubenswrapper[4685]: I1202 11:02:21.002731 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/crc-debug-s6br6" event={"ID":"409be931-5948-40bf-a525-6345925f3dc9","Type":"ContainerDied","Data":"48ebd2910ee69667f19a9bebb9aa075289c604cb535f11638290b840d2b5824f"} Dec 02 11:02:22 crc kubenswrapper[4685]: I1202 11:02:22.107302 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-s6br6" Dec 02 11:02:22 crc kubenswrapper[4685]: I1202 11:02:22.158248 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-f7twl/crc-debug-s6br6"] Dec 02 11:02:22 crc kubenswrapper[4685]: I1202 11:02:22.158316 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-f7twl/crc-debug-s6br6"] Dec 02 11:02:22 crc kubenswrapper[4685]: I1202 11:02:22.195206 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrzwl\" (UniqueName: \"kubernetes.io/projected/409be931-5948-40bf-a525-6345925f3dc9-kube-api-access-zrzwl\") pod \"409be931-5948-40bf-a525-6345925f3dc9\" (UID: \"409be931-5948-40bf-a525-6345925f3dc9\") " Dec 02 11:02:22 crc kubenswrapper[4685]: I1202 11:02:22.195416 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/409be931-5948-40bf-a525-6345925f3dc9-host\") pod \"409be931-5948-40bf-a525-6345925f3dc9\" (UID: \"409be931-5948-40bf-a525-6345925f3dc9\") " Dec 02 11:02:22 crc kubenswrapper[4685]: I1202 11:02:22.195852 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/409be931-5948-40bf-a525-6345925f3dc9-host" (OuterVolumeSpecName: "host") pod "409be931-5948-40bf-a525-6345925f3dc9" (UID: "409be931-5948-40bf-a525-6345925f3dc9"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:02:22 crc kubenswrapper[4685]: I1202 11:02:22.212827 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/409be931-5948-40bf-a525-6345925f3dc9-kube-api-access-zrzwl" (OuterVolumeSpecName: "kube-api-access-zrzwl") pod "409be931-5948-40bf-a525-6345925f3dc9" (UID: "409be931-5948-40bf-a525-6345925f3dc9"). InnerVolumeSpecName "kube-api-access-zrzwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:02:22 crc kubenswrapper[4685]: I1202 11:02:22.297148 4685 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/409be931-5948-40bf-a525-6345925f3dc9-host\") on node \"crc\" DevicePath \"\"" Dec 02 11:02:22 crc kubenswrapper[4685]: I1202 11:02:22.297190 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrzwl\" (UniqueName: \"kubernetes.io/projected/409be931-5948-40bf-a525-6345925f3dc9-kube-api-access-zrzwl\") on node \"crc\" DevicePath \"\"" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.021443 4685 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="537bfdff4ec2b6f09f4dea43dd8841405a4e363d8bff64c614b0385198019a18" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.021511 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-s6br6" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.470191 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-f7twl/crc-debug-k92hc"] Dec 02 11:02:23 crc kubenswrapper[4685]: E1202 11:02:23.471247 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="409be931-5948-40bf-a525-6345925f3dc9" containerName="container-00" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.471327 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="409be931-5948-40bf-a525-6345925f3dc9" containerName="container-00" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.471546 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="409be931-5948-40bf-a525-6345925f3dc9" containerName="container-00" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.472196 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-k92hc" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.636512 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pphtg\" (UniqueName: \"kubernetes.io/projected/7129ac32-5c1d-448a-b353-ff059b7510b2-kube-api-access-pphtg\") pod \"crc-debug-k92hc\" (UID: \"7129ac32-5c1d-448a-b353-ff059b7510b2\") " pod="openshift-must-gather-f7twl/crc-debug-k92hc" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.637055 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7129ac32-5c1d-448a-b353-ff059b7510b2-host\") pod \"crc-debug-k92hc\" (UID: \"7129ac32-5c1d-448a-b353-ff059b7510b2\") " pod="openshift-must-gather-f7twl/crc-debug-k92hc" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.739517 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7129ac32-5c1d-448a-b353-ff059b7510b2-host\") pod \"crc-debug-k92hc\" (UID: \"7129ac32-5c1d-448a-b353-ff059b7510b2\") " pod="openshift-must-gather-f7twl/crc-debug-k92hc" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.739711 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pphtg\" (UniqueName: \"kubernetes.io/projected/7129ac32-5c1d-448a-b353-ff059b7510b2-kube-api-access-pphtg\") pod \"crc-debug-k92hc\" (UID: \"7129ac32-5c1d-448a-b353-ff059b7510b2\") " pod="openshift-must-gather-f7twl/crc-debug-k92hc" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.739707 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7129ac32-5c1d-448a-b353-ff059b7510b2-host\") pod \"crc-debug-k92hc\" (UID: \"7129ac32-5c1d-448a-b353-ff059b7510b2\") " pod="openshift-must-gather-f7twl/crc-debug-k92hc" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.765115 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pphtg\" (UniqueName: \"kubernetes.io/projected/7129ac32-5c1d-448a-b353-ff059b7510b2-kube-api-access-pphtg\") pod \"crc-debug-k92hc\" (UID: \"7129ac32-5c1d-448a-b353-ff059b7510b2\") " pod="openshift-must-gather-f7twl/crc-debug-k92hc" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.788046 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-k92hc" Dec 02 11:02:23 crc kubenswrapper[4685]: I1202 11:02:23.912483 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="409be931-5948-40bf-a525-6345925f3dc9" path="/var/lib/kubelet/pods/409be931-5948-40bf-a525-6345925f3dc9/volumes" Dec 02 11:02:24 crc kubenswrapper[4685]: I1202 11:02:24.032183 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/crc-debug-k92hc" event={"ID":"7129ac32-5c1d-448a-b353-ff059b7510b2","Type":"ContainerStarted","Data":"76b8115cd78a8fa749b9b820c09e8cf4b7c5be93dc844dc633ab9da1663f544a"} Dec 02 11:02:25 crc kubenswrapper[4685]: I1202 11:02:25.047413 4685 generic.go:334] "Generic (PLEG): container finished" podID="7129ac32-5c1d-448a-b353-ff059b7510b2" containerID="1de2183a9df8dcea4efe9fac1ecab99942fcd76755965d3cfde1cff8fe431c79" exitCode=0 Dec 02 11:02:25 crc kubenswrapper[4685]: I1202 11:02:25.047614 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/crc-debug-k92hc" event={"ID":"7129ac32-5c1d-448a-b353-ff059b7510b2","Type":"ContainerDied","Data":"1de2183a9df8dcea4efe9fac1ecab99942fcd76755965d3cfde1cff8fe431c79"} Dec 02 11:02:25 crc kubenswrapper[4685]: I1202 11:02:25.496881 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-f7twl/crc-debug-k92hc"] Dec 02 11:02:25 crc kubenswrapper[4685]: I1202 11:02:25.508339 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-f7twl/crc-debug-k92hc"] Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.171276 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-k92hc" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.285023 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pphtg\" (UniqueName: \"kubernetes.io/projected/7129ac32-5c1d-448a-b353-ff059b7510b2-kube-api-access-pphtg\") pod \"7129ac32-5c1d-448a-b353-ff059b7510b2\" (UID: \"7129ac32-5c1d-448a-b353-ff059b7510b2\") " Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.285415 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7129ac32-5c1d-448a-b353-ff059b7510b2-host\") pod \"7129ac32-5c1d-448a-b353-ff059b7510b2\" (UID: \"7129ac32-5c1d-448a-b353-ff059b7510b2\") " Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.285523 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7129ac32-5c1d-448a-b353-ff059b7510b2-host" (OuterVolumeSpecName: "host") pod "7129ac32-5c1d-448a-b353-ff059b7510b2" (UID: "7129ac32-5c1d-448a-b353-ff059b7510b2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.285948 4685 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7129ac32-5c1d-448a-b353-ff059b7510b2-host\") on node \"crc\" DevicePath \"\"" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.291431 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7129ac32-5c1d-448a-b353-ff059b7510b2-kube-api-access-pphtg" (OuterVolumeSpecName: "kube-api-access-pphtg") pod "7129ac32-5c1d-448a-b353-ff059b7510b2" (UID: "7129ac32-5c1d-448a-b353-ff059b7510b2"). InnerVolumeSpecName "kube-api-access-pphtg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.387958 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pphtg\" (UniqueName: \"kubernetes.io/projected/7129ac32-5c1d-448a-b353-ff059b7510b2-kube-api-access-pphtg\") on node \"crc\" DevicePath \"\"" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.710026 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-f7twl/crc-debug-9xqff"] Dec 02 11:02:26 crc kubenswrapper[4685]: E1202 11:02:26.710728 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7129ac32-5c1d-448a-b353-ff059b7510b2" containerName="container-00" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.710745 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="7129ac32-5c1d-448a-b353-ff059b7510b2" containerName="container-00" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.711002 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="7129ac32-5c1d-448a-b353-ff059b7510b2" containerName="container-00" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.712010 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-9xqff" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.899530 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0739a66-fd7c-4b5a-9946-fd660df8e833-host\") pod \"crc-debug-9xqff\" (UID: \"a0739a66-fd7c-4b5a-9946-fd660df8e833\") " pod="openshift-must-gather-f7twl/crc-debug-9xqff" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.899888 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bp5nn\" (UniqueName: \"kubernetes.io/projected/a0739a66-fd7c-4b5a-9946-fd660df8e833-kube-api-access-bp5nn\") pod \"crc-debug-9xqff\" (UID: \"a0739a66-fd7c-4b5a-9946-fd660df8e833\") " pod="openshift-must-gather-f7twl/crc-debug-9xqff" Dec 02 11:02:26 crc kubenswrapper[4685]: I1202 11:02:26.901055 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:02:26 crc kubenswrapper[4685]: E1202 11:02:26.901405 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:02:27 crc kubenswrapper[4685]: I1202 11:02:27.001509 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bp5nn\" (UniqueName: \"kubernetes.io/projected/a0739a66-fd7c-4b5a-9946-fd660df8e833-kube-api-access-bp5nn\") pod \"crc-debug-9xqff\" (UID: \"a0739a66-fd7c-4b5a-9946-fd660df8e833\") " pod="openshift-must-gather-f7twl/crc-debug-9xqff" Dec 02 11:02:27 crc kubenswrapper[4685]: I1202 11:02:27.001705 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0739a66-fd7c-4b5a-9946-fd660df8e833-host\") pod \"crc-debug-9xqff\" (UID: \"a0739a66-fd7c-4b5a-9946-fd660df8e833\") " pod="openshift-must-gather-f7twl/crc-debug-9xqff" Dec 02 11:02:27 crc kubenswrapper[4685]: I1202 
11:02:27.001959 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0739a66-fd7c-4b5a-9946-fd660df8e833-host\") pod \"crc-debug-9xqff\" (UID: \"a0739a66-fd7c-4b5a-9946-fd660df8e833\") " pod="openshift-must-gather-f7twl/crc-debug-9xqff" Dec 02 11:02:27 crc kubenswrapper[4685]: I1202 11:02:27.018816 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bp5nn\" (UniqueName: \"kubernetes.io/projected/a0739a66-fd7c-4b5a-9946-fd660df8e833-kube-api-access-bp5nn\") pod \"crc-debug-9xqff\" (UID: \"a0739a66-fd7c-4b5a-9946-fd660df8e833\") " pod="openshift-must-gather-f7twl/crc-debug-9xqff" Dec 02 11:02:27 crc kubenswrapper[4685]: I1202 11:02:27.032266 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-9xqff" Dec 02 11:02:27 crc kubenswrapper[4685]: W1202 11:02:27.059361 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0739a66_fd7c_4b5a_9946_fd660df8e833.slice/crio-f2e3b47fd3cd7146a7ba4d3930321b73e2ef3db5171072bda8b0407ec4ff1b27 WatchSource:0}: Error finding container f2e3b47fd3cd7146a7ba4d3930321b73e2ef3db5171072bda8b0407ec4ff1b27: Status 404 returned error can't find the container with id f2e3b47fd3cd7146a7ba4d3930321b73e2ef3db5171072bda8b0407ec4ff1b27 Dec 02 11:02:27 crc kubenswrapper[4685]: I1202 11:02:27.072498 4685 scope.go:117] "RemoveContainer" containerID="1de2183a9df8dcea4efe9fac1ecab99942fcd76755965d3cfde1cff8fe431c79" Dec 02 11:02:27 crc kubenswrapper[4685]: I1202 11:02:27.072657 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-k92hc" Dec 02 11:02:27 crc kubenswrapper[4685]: I1202 11:02:27.911102 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7129ac32-5c1d-448a-b353-ff059b7510b2" path="/var/lib/kubelet/pods/7129ac32-5c1d-448a-b353-ff059b7510b2/volumes" Dec 02 11:02:28 crc kubenswrapper[4685]: I1202 11:02:28.087466 4685 generic.go:334] "Generic (PLEG): container finished" podID="a0739a66-fd7c-4b5a-9946-fd660df8e833" containerID="108c72104c4070407aebaaee1264f35496702ce71aa551f1ce07caa6f3b06d2b" exitCode=0 Dec 02 11:02:28 crc kubenswrapper[4685]: I1202 11:02:28.087526 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/crc-debug-9xqff" event={"ID":"a0739a66-fd7c-4b5a-9946-fd660df8e833","Type":"ContainerDied","Data":"108c72104c4070407aebaaee1264f35496702ce71aa551f1ce07caa6f3b06d2b"} Dec 02 11:02:28 crc kubenswrapper[4685]: I1202 11:02:28.087609 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/crc-debug-9xqff" event={"ID":"a0739a66-fd7c-4b5a-9946-fd660df8e833","Type":"ContainerStarted","Data":"f2e3b47fd3cd7146a7ba4d3930321b73e2ef3db5171072bda8b0407ec4ff1b27"} Dec 02 11:02:28 crc kubenswrapper[4685]: I1202 11:02:28.132122 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-f7twl/crc-debug-9xqff"] Dec 02 11:02:28 crc kubenswrapper[4685]: I1202 11:02:28.144542 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-f7twl/crc-debug-9xqff"] Dec 02 11:02:29 crc kubenswrapper[4685]: I1202 11:02:29.209039 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-9xqff" Dec 02 11:02:29 crc kubenswrapper[4685]: I1202 11:02:29.344862 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bp5nn\" (UniqueName: \"kubernetes.io/projected/a0739a66-fd7c-4b5a-9946-fd660df8e833-kube-api-access-bp5nn\") pod \"a0739a66-fd7c-4b5a-9946-fd660df8e833\" (UID: \"a0739a66-fd7c-4b5a-9946-fd660df8e833\") " Dec 02 11:02:29 crc kubenswrapper[4685]: I1202 11:02:29.345091 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0739a66-fd7c-4b5a-9946-fd660df8e833-host\") pod \"a0739a66-fd7c-4b5a-9946-fd660df8e833\" (UID: \"a0739a66-fd7c-4b5a-9946-fd660df8e833\") " Dec 02 11:02:29 crc kubenswrapper[4685]: I1202 11:02:29.345496 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a0739a66-fd7c-4b5a-9946-fd660df8e833-host" (OuterVolumeSpecName: "host") pod "a0739a66-fd7c-4b5a-9946-fd660df8e833" (UID: "a0739a66-fd7c-4b5a-9946-fd660df8e833"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 02 11:02:29 crc kubenswrapper[4685]: I1202 11:02:29.346214 4685 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a0739a66-fd7c-4b5a-9946-fd660df8e833-host\") on node \"crc\" DevicePath \"\"" Dec 02 11:02:29 crc kubenswrapper[4685]: I1202 11:02:29.369545 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0739a66-fd7c-4b5a-9946-fd660df8e833-kube-api-access-bp5nn" (OuterVolumeSpecName: "kube-api-access-bp5nn") pod "a0739a66-fd7c-4b5a-9946-fd660df8e833" (UID: "a0739a66-fd7c-4b5a-9946-fd660df8e833"). InnerVolumeSpecName "kube-api-access-bp5nn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:02:29 crc kubenswrapper[4685]: I1202 11:02:29.447536 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bp5nn\" (UniqueName: \"kubernetes.io/projected/a0739a66-fd7c-4b5a-9946-fd660df8e833-kube-api-access-bp5nn\") on node \"crc\" DevicePath \"\"" Dec 02 11:02:29 crc kubenswrapper[4685]: I1202 11:02:29.911335 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0739a66-fd7c-4b5a-9946-fd660df8e833" path="/var/lib/kubelet/pods/a0739a66-fd7c-4b5a-9946-fd660df8e833/volumes" Dec 02 11:02:30 crc kubenswrapper[4685]: I1202 11:02:30.111674 4685 scope.go:117] "RemoveContainer" containerID="108c72104c4070407aebaaee1264f35496702ce71aa551f1ce07caa6f3b06d2b" Dec 02 11:02:30 crc kubenswrapper[4685]: I1202 11:02:30.111781 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-f7twl/crc-debug-9xqff" Dec 02 11:02:38 crc kubenswrapper[4685]: I1202 11:02:38.901442 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:02:38 crc kubenswrapper[4685]: E1202 11:02:38.902162 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:02:49 crc kubenswrapper[4685]: I1202 11:02:49.899615 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:02:49 crc kubenswrapper[4685]: E1202 11:02:49.900251 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:02:58 crc kubenswrapper[4685]: I1202 11:02:58.605904 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-56bd4844cd-s5f2m_746491bf-6d00-4370-a7ba-740687bd6faa/barbican-api/0.log" Dec 02 11:02:58 crc kubenswrapper[4685]: I1202 11:02:58.822449 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-56bd4844cd-s5f2m_746491bf-6d00-4370-a7ba-740687bd6faa/barbican-api-log/0.log" Dec 02 11:02:58 crc kubenswrapper[4685]: I1202 11:02:58.837184 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7b57b745f4-h769r_48d319ac-7ed6-48dd-b934-91833d81dd44/barbican-keystone-listener/0.log" Dec 02 11:02:58 crc kubenswrapper[4685]: I1202 11:02:58.888044 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7b57b745f4-h769r_48d319ac-7ed6-48dd-b934-91833d81dd44/barbican-keystone-listener-log/0.log" Dec 02 11:02:59 crc kubenswrapper[4685]: I1202 11:02:59.003279 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6cd49d95f9-sdvx2_f2daca7c-e2f6-4e26-a1d8-259d2123c2d6/barbican-worker/0.log" Dec 02 11:02:59 crc kubenswrapper[4685]: I1202 11:02:59.109007 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6cd49d95f9-sdvx2_f2daca7c-e2f6-4e26-a1d8-259d2123c2d6/barbican-worker-log/0.log" Dec 02 11:02:59 crc kubenswrapper[4685]: I1202 11:02:59.250918 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-h5ltm_d8a0aadd-4e71-45c1-a810-fae1955f590f/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:02:59 crc kubenswrapper[4685]: I1202 11:02:59.643691 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2b4cba34-5e49-48a8-a496-fe5998d56f09/ceilometer-central-agent/0.log" Dec 02 11:02:59 crc kubenswrapper[4685]: I1202 11:02:59.701031 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_2b4cba34-5e49-48a8-a496-fe5998d56f09/ceilometer-notification-agent/0.log" Dec 02 11:02:59 crc kubenswrapper[4685]: I1202 11:02:59.773145 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2b4cba34-5e49-48a8-a496-fe5998d56f09/proxy-httpd/0.log" Dec 02 11:02:59 crc kubenswrapper[4685]: I1202 11:02:59.808405 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2b4cba34-5e49-48a8-a496-fe5998d56f09/sg-core/0.log" Dec 02 11:02:59 crc kubenswrapper[4685]: I1202 11:02:59.937693 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_62f488e9-6105-488d-bb01-f612db6e1fae/cinder-api/0.log" Dec 02 11:03:00 crc kubenswrapper[4685]: I1202 11:03:00.077163 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_62f488e9-6105-488d-bb01-f612db6e1fae/cinder-api-log/0.log" Dec 02 11:03:00 crc kubenswrapper[4685]: I1202 11:03:00.206370 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_880ba69c-db0d-4fab-b46d-45c7e8915684/cinder-scheduler/0.log" Dec 02 11:03:00 crc kubenswrapper[4685]: I1202 11:03:00.320514 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_880ba69c-db0d-4fab-b46d-45c7e8915684/probe/0.log" Dec 02 11:03:00 crc kubenswrapper[4685]: I1202 11:03:00.445661 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-899q5_fb248306-2b41-458e-9127-987af525ae12/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:00 crc kubenswrapper[4685]: I1202 11:03:00.540711 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-8sxh6_459c09ff-049f-4edf-b41c-dc203f6527dc/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:00 crc kubenswrapper[4685]: I1202 11:03:00.683699 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-gpxbh_7e14e196-7cc2-49ba-8fd9-fdafa0c0727d/init/0.log" Dec 02 11:03:00 crc kubenswrapper[4685]: I1202 11:03:00.812591 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-gpxbh_7e14e196-7cc2-49ba-8fd9-fdafa0c0727d/init/0.log" Dec 02 11:03:00 crc kubenswrapper[4685]: I1202 11:03:00.932785 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-bpd44_1fcc7f1b-7385-47f7-9ec5-90b1f3f8042c/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:00 crc kubenswrapper[4685]: I1202 11:03:00.968404 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd9bffc9-gpxbh_7e14e196-7cc2-49ba-8fd9-fdafa0c0727d/dnsmasq-dns/0.log" Dec 02 11:03:01 crc kubenswrapper[4685]: I1202 11:03:01.190533 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ed1ab1f1-1e32-439f-91cf-ba12aca0273a/glance-httpd/0.log" Dec 02 11:03:01 crc kubenswrapper[4685]: I1202 11:03:01.207046 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ed1ab1f1-1e32-439f-91cf-ba12aca0273a/glance-log/0.log" Dec 02 11:03:01 crc kubenswrapper[4685]: I1202 11:03:01.359717 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_37ce4a0b-7871-4784-a19e-36bd7b62ebbe/glance-httpd/0.log" Dec 
02 11:03:01 crc kubenswrapper[4685]: I1202 11:03:01.436431 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_37ce4a0b-7871-4784-a19e-36bd7b62ebbe/glance-log/0.log" Dec 02 11:03:01 crc kubenswrapper[4685]: I1202 11:03:01.625400 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-79bf856964-lh2w8_3d251819-b2e8-4cc5-b56c-977ea549bf2f/horizon/0.log" Dec 02 11:03:01 crc kubenswrapper[4685]: I1202 11:03:01.845364 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-79bf856964-lh2w8_3d251819-b2e8-4cc5-b56c-977ea549bf2f/horizon-log/0.log" Dec 02 11:03:01 crc kubenswrapper[4685]: I1202 11:03:01.885804 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-wjmdw_b8bea067-fabf-4a0b-b873-05104a785c39/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:02 crc kubenswrapper[4685]: I1202 11:03:02.135692 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-t7jvq_e08815ae-1633-46ac-85ce-3aa867348763/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:02 crc kubenswrapper[4685]: I1202 11:03:02.226183 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6549967485-7j47k_8f4445dd-b293-4beb-af28-3d6dcf902a94/keystone-api/0.log" Dec 02 11:03:02 crc kubenswrapper[4685]: I1202 11:03:02.428058 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29411221-xnlhc_e8058fba-9754-4c55-b7fc-85ec08c67c99/keystone-cron/0.log" Dec 02 11:03:02 crc kubenswrapper[4685]: I1202 11:03:02.428469 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_23124bbd-78d8-4c4f-a2ae-8f3ee8e7186c/kube-state-metrics/0.log" Dec 02 11:03:02 crc kubenswrapper[4685]: I1202 11:03:02.844594 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-x5tgm_5a0df3ff-af5b-4aa9-b108-cc65a1f43571/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:03 crc kubenswrapper[4685]: I1202 11:03:03.101137 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6dc8d84c85-x99fw_c4743ed7-a0f7-46b1-b0d7-50828835440e/neutron-httpd/0.log" Dec 02 11:03:03 crc kubenswrapper[4685]: I1202 11:03:03.156802 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6dc8d84c85-x99fw_c4743ed7-a0f7-46b1-b0d7-50828835440e/neutron-api/0.log" Dec 02 11:03:03 crc kubenswrapper[4685]: I1202 11:03:03.474275 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_449de76a-491d-4874-8b46-df24eb5c628a/memcached/0.log" Dec 02 11:03:03 crc kubenswrapper[4685]: I1202 11:03:03.488390 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-q7h52_9fc6409b-b597-4290-9d9d-313fa733ddf7/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:03 crc kubenswrapper[4685]: I1202 11:03:03.999229 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2/nova-api-log/0.log" Dec 02 11:03:04 crc kubenswrapper[4685]: I1202 11:03:04.062537 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_587a88c2-acbc-4104-959f-8dbc52f511de/nova-cell0-conductor-conductor/0.log" Dec 02 11:03:04 crc 
kubenswrapper[4685]: I1202 11:03:04.220698 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_6d403a63-e543-4bc9-8f38-daaee1ceb4e6/nova-cell1-conductor-conductor/0.log" Dec 02 11:03:04 crc kubenswrapper[4685]: I1202 11:03:04.223283 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_5ac39f0c-a9f2-4a7b-b34b-0b0e7371cff2/nova-api-api/0.log" Dec 02 11:03:04 crc kubenswrapper[4685]: I1202 11:03:04.401524 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_009e6abc-834a-4487-b70d-0ec6e64d994d/nova-cell1-novncproxy-novncproxy/0.log" Dec 02 11:03:04 crc kubenswrapper[4685]: I1202 11:03:04.540226 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-t8bqh_ab586931-b7c5-450d-831c-ca05dbb865ad/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:04 crc kubenswrapper[4685]: I1202 11:03:04.597955 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_2f14ae08-f9e6-41bc-bb0c-9e6450267d63/nova-metadata-log/0.log" Dec 02 11:03:04 crc kubenswrapper[4685]: I1202 11:03:04.884003 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_b8b721a8-0f97-4892-8c89-56382988595e/nova-scheduler-scheduler/0.log" Dec 02 11:03:04 crc kubenswrapper[4685]: I1202 11:03:04.894280 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_67ed831e-7122-4e6f-a320-51df1849c1d7/mysql-bootstrap/0.log" Dec 02 11:03:04 crc kubenswrapper[4685]: I1202 11:03:04.899181 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:03:04 crc kubenswrapper[4685]: E1202 11:03:04.899389 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:03:05 crc kubenswrapper[4685]: I1202 11:03:05.118962 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_67ed831e-7122-4e6f-a320-51df1849c1d7/mysql-bootstrap/0.log" Dec 02 11:03:05 crc kubenswrapper[4685]: I1202 11:03:05.173186 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_67ed831e-7122-4e6f-a320-51df1849c1d7/galera/0.log" Dec 02 11:03:05 crc kubenswrapper[4685]: I1202 11:03:05.175670 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_13cc5021-b162-434b-8ae9-d3781b6f421e/mysql-bootstrap/0.log" Dec 02 11:03:05 crc kubenswrapper[4685]: I1202 11:03:05.398902 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_2f14ae08-f9e6-41bc-bb0c-9e6450267d63/nova-metadata-metadata/0.log" Dec 02 11:03:05 crc kubenswrapper[4685]: I1202 11:03:05.445197 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_13cc5021-b162-434b-8ae9-d3781b6f421e/mysql-bootstrap/0.log" Dec 02 11:03:05 crc kubenswrapper[4685]: I1202 11:03:05.511949 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_13cc5021-b162-434b-8ae9-d3781b6f421e/galera/0.log" 
Dec 02 11:03:05 crc kubenswrapper[4685]: I1202 11:03:05.526506 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_62bb7fcc-d4cc-4b1d-b29a-98d6f3441731/openstackclient/0.log" Dec 02 11:03:05 crc kubenswrapper[4685]: I1202 11:03:05.705833 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-2sntp_ee7d3162-98e7-4af9-aad0-2098e23d1743/ovn-controller/0.log" Dec 02 11:03:05 crc kubenswrapper[4685]: I1202 11:03:05.732911 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-98qz8_12ca7abe-455f-4bfc-9da9-420462c92e69/openstack-network-exporter/0.log" Dec 02 11:03:05 crc kubenswrapper[4685]: I1202 11:03:05.920482 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c8mrc_eac3c6e2-9ac7-40a0-91e5-a54010d07a99/ovsdb-server-init/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.111097 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c8mrc_eac3c6e2-9ac7-40a0-91e5-a54010d07a99/ovs-vswitchd/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.117466 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c8mrc_eac3c6e2-9ac7-40a0-91e5-a54010d07a99/ovsdb-server/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.199093 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c8mrc_eac3c6e2-9ac7-40a0-91e5-a54010d07a99/ovsdb-server-init/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.250459 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-xd6j6_56e7d4c2-fb54-4104-ad31-0d90ceafd0ff/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.355157 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a8e44cc8-073c-4db3-af8b-c6b18bb2c808/openstack-network-exporter/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.412357 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a8e44cc8-073c-4db3-af8b-c6b18bb2c808/ovn-northd/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.493382 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0875b84e-91b6-4092-b8b3-a75abd86728d/openstack-network-exporter/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.599737 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0875b84e-91b6-4092-b8b3-a75abd86728d/ovsdbserver-nb/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.644334 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_03278bdb-7697-4dd0-b482-97b93aa055ba/openstack-network-exporter/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.754623 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_03278bdb-7697-4dd0-b482-97b93aa055ba/ovsdbserver-sb/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.853226 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-9f589dd8-wf2cx_1af68409-9c33-470d-96bd-abf8eb121c9d/placement-api/0.log" Dec 02 11:03:06 crc kubenswrapper[4685]: I1202 11:03:06.946747 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_placement-9f589dd8-wf2cx_1af68409-9c33-470d-96bd-abf8eb121c9d/placement-log/0.log" Dec 02 11:03:07 crc kubenswrapper[4685]: I1202 11:03:07.258040 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_79bd4baf-4a03-43d7-8b3b-3a632474694e/setup-container/0.log" Dec 02 11:03:07 crc kubenswrapper[4685]: I1202 11:03:07.470014 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_79bd4baf-4a03-43d7-8b3b-3a632474694e/setup-container/0.log" Dec 02 11:03:07 crc kubenswrapper[4685]: I1202 11:03:07.526056 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_79bd4baf-4a03-43d7-8b3b-3a632474694e/rabbitmq/0.log" Dec 02 11:03:07 crc kubenswrapper[4685]: I1202 11:03:07.545468 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_622e7d58-a7d5-4898-a94b-ac66e3d0ee7f/setup-container/0.log" Dec 02 11:03:07 crc kubenswrapper[4685]: I1202 11:03:07.694812 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_622e7d58-a7d5-4898-a94b-ac66e3d0ee7f/setup-container/0.log" Dec 02 11:03:07 crc kubenswrapper[4685]: I1202 11:03:07.741103 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_622e7d58-a7d5-4898-a94b-ac66e3d0ee7f/rabbitmq/0.log" Dec 02 11:03:07 crc kubenswrapper[4685]: I1202 11:03:07.763150 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-dlxm4_a639c48a-4ce5-45e3-ae7a-22672b81443f/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:07 crc kubenswrapper[4685]: I1202 11:03:07.893999 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-8kv9r_d80fcbde-80c5-4c5c-a42d-f95348702600/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:08 crc kubenswrapper[4685]: I1202 11:03:08.744909 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-2md5s_d47b11b2-65de-45ac-9395-81ccca27d279/ssh-known-hosts-edpm-deployment/0.log" Dec 02 11:03:08 crc kubenswrapper[4685]: I1202 11:03:08.745231 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-lc6xp_003cc841-bf83-4ac8-8b56-f6d451bff580/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:08 crc kubenswrapper[4685]: I1202 11:03:08.770758 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-gh9kp_4c22e2d6-7984-4d74-9202-ea57de627392/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.133359 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-nbrzh_0b83b134-73c4-447e-99a2-a49c814e589c/swift-ring-rebalance/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.137120 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-586f964d77-g52q4_4ad077a4-4937-4de8-9da4-ca0f58f3adef/proxy-server/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.147624 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-586f964d77-g52q4_4ad077a4-4937-4de8-9da4-ca0f58f3adef/proxy-httpd/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.387130 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/account-auditor/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.389606 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/account-reaper/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.404362 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/account-replicator/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.451782 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/account-server/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.562887 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/container-auditor/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.600322 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/container-replicator/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.634934 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/container-server/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.654240 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/container-updater/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.678118 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/object-auditor/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.877664 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/object-replicator/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.899740 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/object-expirer/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.919896 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/object-updater/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.920790 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/object-server/0.log" Dec 02 11:03:09 crc kubenswrapper[4685]: I1202 11:03:09.962770 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/rsync/0.log" Dec 02 11:03:10 crc kubenswrapper[4685]: I1202 11:03:10.068222 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_8a509f21-bfbc-4240-be77-1d5ca83344cf/swift-recon-cron/0.log" Dec 02 11:03:10 crc kubenswrapper[4685]: I1202 11:03:10.145407 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-mcmcr_cefd0695-3aac-4aa9-b035-f6e6d22d79f7/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:10 crc kubenswrapper[4685]: I1202 11:03:10.229980 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_tempest-tests-tempest_27023d6a-a566-4be7-9244-915ce922dd05/tempest-tests-tempest-tests-runner/0.log" Dec 02 11:03:10 crc kubenswrapper[4685]: I1202 11:03:10.352532 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_0063a6a8-b535-4747-b1bd-8680aea722af/test-operator-logs-container/0.log" Dec 02 11:03:10 crc kubenswrapper[4685]: I1202 11:03:10.427655 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-nd8sn_13eea0ea-5642-4780-9aaa-dd0148f05809/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 02 11:03:16 crc kubenswrapper[4685]: I1202 11:03:16.899339 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:03:16 crc kubenswrapper[4685]: E1202 11:03:16.900829 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:03:30 crc kubenswrapper[4685]: I1202 11:03:30.900277 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:03:30 crc kubenswrapper[4685]: E1202 11:03:30.901404 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:03:36 crc kubenswrapper[4685]: I1202 11:03:36.591085 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/util/0.log" Dec 02 11:03:36 crc kubenswrapper[4685]: I1202 11:03:36.719474 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/util/0.log" Dec 02 11:03:36 crc kubenswrapper[4685]: I1202 11:03:36.749310 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/pull/0.log" Dec 02 11:03:36 crc kubenswrapper[4685]: I1202 11:03:36.809081 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/pull/0.log" Dec 02 11:03:36 crc kubenswrapper[4685]: I1202 11:03:36.997983 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/util/0.log" Dec 02 11:03:37 crc kubenswrapper[4685]: I1202 11:03:37.029832 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/extract/0.log" Dec 02 11:03:37 crc kubenswrapper[4685]: I1202 11:03:37.038245 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_8457484d233fb1c0aa0156d60c2fc87b5ea233880bb540ec8428483a968sbh4_dc08c183-1584-4e4c-baf9-eff33de7b396/pull/0.log" Dec 02 11:03:37 crc kubenswrapper[4685]: I1202 11:03:37.473426 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-855qm_cccf3baf-f063-4962-8856-c80c78439b82/kube-rbac-proxy/0.log" Dec 02 11:03:37 crc kubenswrapper[4685]: I1202 11:03:37.513349 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-7qgfl_544b6fe9-890e-4b17-8f8e-55f53d64fcf7/kube-rbac-proxy/0.log" Dec 02 11:03:37 crc kubenswrapper[4685]: I1202 11:03:37.536537 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-7qgfl_544b6fe9-890e-4b17-8f8e-55f53d64fcf7/manager/0.log" Dec 02 11:03:37 crc kubenswrapper[4685]: I1202 11:03:37.599209 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-855qm_cccf3baf-f063-4962-8856-c80c78439b82/manager/0.log" Dec 02 11:03:37 crc kubenswrapper[4685]: I1202 11:03:37.719716 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-9s2nf_dd4f4fd1-38b6-4732-bab1-96a522d34e53/kube-rbac-proxy/0.log" Dec 02 11:03:37 crc kubenswrapper[4685]: I1202 11:03:37.743060 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-9s2nf_dd4f4fd1-38b6-4732-bab1-96a522d34e53/manager/0.log" Dec 02 11:03:37 crc kubenswrapper[4685]: I1202 11:03:37.912098 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-vn545_901441b3-91d9-4edf-8955-cd5514589dec/kube-rbac-proxy/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.022462 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-vn545_901441b3-91d9-4edf-8955-cd5514589dec/manager/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.035543 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xqspp_b16756f6-29e0-4e33-8e00-f7b0e193b958/kube-rbac-proxy/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.131890 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xqspp_b16756f6-29e0-4e33-8e00-f7b0e193b958/manager/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.212647 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-xnk7k_739ae795-6209-4881-8bf6-be391a810a86/kube-rbac-proxy/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.252907 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-xnk7k_739ae795-6209-4881-8bf6-be391a810a86/manager/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.407554 4685 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-p62sl_36189be8-91c8-4b60-90d6-050a07ae86d3/kube-rbac-proxy/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.566980 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-p62sl_36189be8-91c8-4b60-90d6-050a07ae86d3/manager/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.644491 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-vz7bq_bb0c5cd2-9459-4e31-8613-f758d330dce2/kube-rbac-proxy/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.678491 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-vz7bq_bb0c5cd2-9459-4e31-8613-f758d330dce2/manager/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.780186 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-5sn8t_6513f83d-2079-477d-8976-68cb969806fe/kube-rbac-proxy/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.898214 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-5sn8t_6513f83d-2079-477d-8976-68cb969806fe/manager/0.log" Dec 02 11:03:38 crc kubenswrapper[4685]: I1202 11:03:38.955266 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-rngtk_59c1b39b-2153-4c88-9229-0e951b086fdd/kube-rbac-proxy/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 11:03:39.032437 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-rngtk_59c1b39b-2153-4c88-9229-0e951b086fdd/manager/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 11:03:39.108378 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-9l2jq_bc4b6647-31ea-45d0-ac59-b8b1cef80aeb/kube-rbac-proxy/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 11:03:39.153940 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-9l2jq_bc4b6647-31ea-45d0-ac59-b8b1cef80aeb/manager/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 11:03:39.314048 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-7tmr8_0a4bb15b-4ada-4698-9747-dfa600f319d3/kube-rbac-proxy/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 11:03:39.436369 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-7tmr8_0a4bb15b-4ada-4698-9747-dfa600f319d3/manager/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 11:03:39.504417 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-bqctl_e88f923e-f7cc-4292-a49c-483be1e7848e/kube-rbac-proxy/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 11:03:39.610154 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-bqctl_e88f923e-f7cc-4292-a49c-483be1e7848e/manager/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 
11:03:39.676116 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-vx7jq_20b551a4-85bc-4ecb-b502-08f844a6b911/kube-rbac-proxy/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 11:03:39.762754 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-vx7jq_20b551a4-85bc-4ecb-b502-08f844a6b911/manager/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 11:03:39.904505 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd49v57k_50904294-59b3-4a71-84f9-8f171cad02e2/manager/0.log" Dec 02 11:03:39 crc kubenswrapper[4685]: I1202 11:03:39.926304 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd49v57k_50904294-59b3-4a71-84f9-8f171cad02e2/kube-rbac-proxy/0.log" Dec 02 11:03:40 crc kubenswrapper[4685]: I1202 11:03:40.372455 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-b866867cf-zf2zb_6e78ca06-e6a2-4973-a8df-593409527f5e/operator/0.log" Dec 02 11:03:40 crc kubenswrapper[4685]: I1202 11:03:40.454154 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-8mbzs_16f31204-e29f-41f1-b560-814671c5967a/registry-server/0.log" Dec 02 11:03:40 crc kubenswrapper[4685]: I1202 11:03:40.707048 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-njgr6_30be1eaf-4d63-4fb6-9372-0857432b6b73/kube-rbac-proxy/0.log" Dec 02 11:03:40 crc kubenswrapper[4685]: I1202 11:03:40.892556 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-njgr6_30be1eaf-4d63-4fb6-9372-0857432b6b73/manager/0.log" Dec 02 11:03:40 crc kubenswrapper[4685]: I1202 11:03:40.966599 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-j5hhn_1177803f-ea41-40ed-8b1d-58c6761363f0/kube-rbac-proxy/0.log" Dec 02 11:03:41 crc kubenswrapper[4685]: I1202 11:03:41.165582 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-j5hhn_1177803f-ea41-40ed-8b1d-58c6761363f0/manager/0.log" Dec 02 11:03:41 crc kubenswrapper[4685]: I1202 11:03:41.245788 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-5pwml_226039ae-3d4c-41e0-8a24-985eb9f63d27/operator/0.log" Dec 02 11:03:41 crc kubenswrapper[4685]: I1202 11:03:41.285125 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-59c586c68c-cvbtm_aa95a5ac-c9b5-4850-8201-b696ed655570/manager/0.log" Dec 02 11:03:41 crc kubenswrapper[4685]: I1202 11:03:41.418536 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-695b4bc5dc-jxvqs_e55320ae-8458-4802-aa07-e406f1b58fce/manager/0.log" Dec 02 11:03:41 crc kubenswrapper[4685]: I1202 11:03:41.440227 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-695b4bc5dc-jxvqs_e55320ae-8458-4802-aa07-e406f1b58fce/kube-rbac-proxy/0.log" Dec 02 11:03:41 crc 
kubenswrapper[4685]: I1202 11:03:41.512433 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-llc6z_cf795d28-4ac2-44de-9ca7-10ef8788eb80/manager/0.log" Dec 02 11:03:41 crc kubenswrapper[4685]: I1202 11:03:41.529844 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-llc6z_cf795d28-4ac2-44de-9ca7-10ef8788eb80/kube-rbac-proxy/0.log" Dec 02 11:03:41 crc kubenswrapper[4685]: I1202 11:03:41.705502 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-zsxv4_8c138433-1126-4ffa-a017-19740e566084/kube-rbac-proxy/0.log" Dec 02 11:03:41 crc kubenswrapper[4685]: I1202 11:03:41.795865 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-2dpvv_01015eb1-ac2f-4bc1-81d3-145ce402db5e/kube-rbac-proxy/0.log" Dec 02 11:03:41 crc kubenswrapper[4685]: I1202 11:03:41.803469 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-zsxv4_8c138433-1126-4ffa-a017-19740e566084/manager/0.log" Dec 02 11:03:41 crc kubenswrapper[4685]: I1202 11:03:41.912478 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-2dpvv_01015eb1-ac2f-4bc1-81d3-145ce402db5e/manager/0.log" Dec 02 11:03:44 crc kubenswrapper[4685]: I1202 11:03:44.900080 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:03:44 crc kubenswrapper[4685]: E1202 11:03:44.900819 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:03:58 crc kubenswrapper[4685]: I1202 11:03:58.900633 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:03:58 crc kubenswrapper[4685]: E1202 11:03:58.901541 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:04:00 crc kubenswrapper[4685]: I1202 11:04:00.124523 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-rpwnx_70ecf800-4f19-4a60-834f-d72f0a1d2d37/control-plane-machine-set-operator/0.log" Dec 02 11:04:00 crc kubenswrapper[4685]: I1202 11:04:00.284409 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-5glrx_2dee0464-6eab-44a6-a33d-3b6096319ecf/kube-rbac-proxy/0.log" Dec 02 11:04:00 crc kubenswrapper[4685]: I1202 11:04:00.342969 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-5glrx_2dee0464-6eab-44a6-a33d-3b6096319ecf/machine-api-operator/0.log" Dec 02 11:04:10 crc kubenswrapper[4685]: I1202 11:04:10.899585 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:04:10 crc kubenswrapper[4685]: E1202 11:04:10.900303 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:04:13 crc kubenswrapper[4685]: I1202 11:04:13.158335 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-lt7gc_f2c74be0-fa96-4153-8159-bd67e214d601/cert-manager-controller/0.log" Dec 02 11:04:13 crc kubenswrapper[4685]: I1202 11:04:13.313689 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-2dnjg_9e280929-1daa-4ff4-b690-1192d259178d/cert-manager-cainjector/0.log" Dec 02 11:04:13 crc kubenswrapper[4685]: I1202 11:04:13.383978 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-6gfx7_379893b2-eb01-49f9-b70c-9e459c6c6ed0/cert-manager-webhook/0.log" Dec 02 11:04:24 crc kubenswrapper[4685]: I1202 11:04:24.900775 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:04:24 crc kubenswrapper[4685]: E1202 11:04:24.901751 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:04:26 crc kubenswrapper[4685]: I1202 11:04:26.189796 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-dtq9q_c8a900b2-3134-445e-8349-4ffab8e264bb/nmstate-console-plugin/0.log" Dec 02 11:04:26 crc kubenswrapper[4685]: I1202 11:04:26.397222 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-wbwxh_dfaa292c-cbfc-495c-8b18-768606608d14/nmstate-handler/0.log" Dec 02 11:04:26 crc kubenswrapper[4685]: I1202 11:04:26.445611 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-zshhw_b7d57eeb-24b9-461f-ab36-86ffcf831603/kube-rbac-proxy/0.log" Dec 02 11:04:26 crc kubenswrapper[4685]: I1202 11:04:26.486586 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-zshhw_b7d57eeb-24b9-461f-ab36-86ffcf831603/nmstate-metrics/0.log" Dec 02 11:04:27 crc kubenswrapper[4685]: I1202 11:04:27.371523 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-wklzz_4b250238-0336-4009-b01e-2a469bf59e33/nmstate-operator/0.log" Dec 02 11:04:27 crc kubenswrapper[4685]: I1202 11:04:27.379381 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-2khp8_8d950d2d-44b3-4441-9434-11fc6a39dca4/nmstate-webhook/0.log" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.584742 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5jdwk"] Dec 02 11:04:34 crc kubenswrapper[4685]: E1202 11:04:34.585738 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0739a66-fd7c-4b5a-9946-fd660df8e833" containerName="container-00" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.585756 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0739a66-fd7c-4b5a-9946-fd660df8e833" containerName="container-00" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.586032 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0739a66-fd7c-4b5a-9946-fd660df8e833" containerName="container-00" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.594736 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.609763 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5jdwk"] Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.732537 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-catalog-content\") pod \"redhat-operators-5jdwk\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.732814 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-utilities\") pod \"redhat-operators-5jdwk\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.732892 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8grbb\" (UniqueName: \"kubernetes.io/projected/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-kube-api-access-8grbb\") pod \"redhat-operators-5jdwk\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.835529 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-catalog-content\") pod \"redhat-operators-5jdwk\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.835656 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-utilities\") pod \"redhat-operators-5jdwk\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.835690 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8grbb\" (UniqueName: \"kubernetes.io/projected/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-kube-api-access-8grbb\") pod \"redhat-operators-5jdwk\" 
(UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.836048 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-catalog-content\") pod \"redhat-operators-5jdwk\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.836109 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-utilities\") pod \"redhat-operators-5jdwk\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.854809 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8grbb\" (UniqueName: \"kubernetes.io/projected/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-kube-api-access-8grbb\") pod \"redhat-operators-5jdwk\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:34 crc kubenswrapper[4685]: I1202 11:04:34.933272 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:35 crc kubenswrapper[4685]: I1202 11:04:35.425504 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5jdwk"] Dec 02 11:04:35 crc kubenswrapper[4685]: I1202 11:04:35.676726 4685 generic.go:334] "Generic (PLEG): container finished" podID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerID="16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2" exitCode=0 Dec 02 11:04:35 crc kubenswrapper[4685]: I1202 11:04:35.677004 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jdwk" event={"ID":"564ac0d6-aebe-47e0-8f08-9f084f2b05a4","Type":"ContainerDied","Data":"16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2"} Dec 02 11:04:35 crc kubenswrapper[4685]: I1202 11:04:35.677037 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jdwk" event={"ID":"564ac0d6-aebe-47e0-8f08-9f084f2b05a4","Type":"ContainerStarted","Data":"d0ca7c0d44f79ae0cced88b280ed89e5c9dfa2722df606c95c43ee7a3985084d"} Dec 02 11:04:36 crc kubenswrapper[4685]: I1202 11:04:36.687981 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jdwk" event={"ID":"564ac0d6-aebe-47e0-8f08-9f084f2b05a4","Type":"ContainerStarted","Data":"c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5"} Dec 02 11:04:37 crc kubenswrapper[4685]: I1202 11:04:37.920926 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:04:37 crc kubenswrapper[4685]: E1202 11:04:37.921471 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:04:40 crc kubenswrapper[4685]: I1202 
11:04:40.727050 4685 generic.go:334] "Generic (PLEG): container finished" podID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerID="c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5" exitCode=0 Dec 02 11:04:40 crc kubenswrapper[4685]: I1202 11:04:40.727111 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jdwk" event={"ID":"564ac0d6-aebe-47e0-8f08-9f084f2b05a4","Type":"ContainerDied","Data":"c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5"} Dec 02 11:04:42 crc kubenswrapper[4685]: I1202 11:04:42.744820 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jdwk" event={"ID":"564ac0d6-aebe-47e0-8f08-9f084f2b05a4","Type":"ContainerStarted","Data":"82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd"} Dec 02 11:04:42 crc kubenswrapper[4685]: I1202 11:04:42.765183 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5jdwk" podStartSLOduration=2.931780723 podStartE2EDuration="8.765167599s" podCreationTimestamp="2025-12-02 11:04:34 +0000 UTC" firstStartedPulling="2025-12-02 11:04:35.678608574 +0000 UTC m=+3768.050382728" lastFinishedPulling="2025-12-02 11:04:41.51199545 +0000 UTC m=+3773.883769604" observedRunningTime="2025-12-02 11:04:42.764265253 +0000 UTC m=+3775.136039407" watchObservedRunningTime="2025-12-02 11:04:42.765167599 +0000 UTC m=+3775.136941753" Dec 02 11:04:43 crc kubenswrapper[4685]: I1202 11:04:43.353307 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-z8v2s_acd555b8-75ea-48d0-a942-c8762e15f37c/controller/0.log" Dec 02 11:04:43 crc kubenswrapper[4685]: I1202 11:04:43.438203 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-z8v2s_acd555b8-75ea-48d0-a942-c8762e15f37c/kube-rbac-proxy/0.log" Dec 02 11:04:43 crc kubenswrapper[4685]: I1202 11:04:43.627414 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-frr-files/0.log" Dec 02 11:04:43 crc kubenswrapper[4685]: I1202 11:04:43.894719 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-frr-files/0.log" Dec 02 11:04:43 crc kubenswrapper[4685]: I1202 11:04:43.907633 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-reloader/0.log" Dec 02 11:04:43 crc kubenswrapper[4685]: I1202 11:04:43.947490 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-reloader/0.log" Dec 02 11:04:43 crc kubenswrapper[4685]: I1202 11:04:43.983492 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-metrics/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.174038 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-metrics/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.175291 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-reloader/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.237726 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-metrics/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.247438 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-frr-files/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.448016 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-frr-files/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.527301 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-reloader/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.540711 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/controller/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.590295 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/cp-metrics/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.743152 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/frr-metrics/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.761059 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/kube-rbac-proxy/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.880605 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/kube-rbac-proxy-frr/0.log" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.933497 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:44 crc kubenswrapper[4685]: I1202 11:04:44.934339 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:45 crc kubenswrapper[4685]: I1202 11:04:45.056034 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/reloader/0.log" Dec 02 11:04:45 crc kubenswrapper[4685]: I1202 11:04:45.196603 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-gdnpk_aa6b58de-e46c-4019-9a27-2991ad81429f/frr-k8s-webhook-server/0.log" Dec 02 11:04:45 crc kubenswrapper[4685]: I1202 11:04:45.657689 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-685cd4986b-57sm9_9edd243d-18c6-4345-b31d-a9825a3ad745/manager/0.log" Dec 02 11:04:45 crc kubenswrapper[4685]: I1202 11:04:45.803020 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-jbwrt_80b410fd-737d-4aed-8095-8ea5386c9cea/frr/0.log" Dec 02 11:04:45 crc kubenswrapper[4685]: I1202 11:04:45.850951 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6db9d48979-ljd69_7883a47e-8804-4094-8e91-73f854896283/webhook-server/0.log" Dec 02 11:04:45 crc kubenswrapper[4685]: I1202 11:04:45.940554 4685 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-2mprm_e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7/kube-rbac-proxy/0.log" Dec 02 11:04:45 crc kubenswrapper[4685]: I1202 11:04:45.991319 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5jdwk" podUID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerName="registry-server" probeResult="failure" output=< Dec 02 11:04:45 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Dec 02 11:04:45 crc kubenswrapper[4685]: > Dec 02 11:04:46 crc kubenswrapper[4685]: I1202 11:04:46.380856 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-2mprm_e6bcf4ed-6fbe-46b2-880c-e70bddd3d6f7/speaker/0.log" Dec 02 11:04:50 crc kubenswrapper[4685]: I1202 11:04:50.900509 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:04:50 crc kubenswrapper[4685]: E1202 11:04:50.901530 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:04:54 crc kubenswrapper[4685]: I1202 11:04:54.989720 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:55 crc kubenswrapper[4685]: I1202 11:04:55.065062 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:55 crc kubenswrapper[4685]: I1202 11:04:55.223358 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5jdwk"] Dec 02 11:04:56 crc kubenswrapper[4685]: I1202 11:04:56.852118 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5jdwk" podUID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerName="registry-server" containerID="cri-o://82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd" gracePeriod=2 Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.276160 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.457038 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-utilities\") pod \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.457689 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-catalog-content\") pod \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.457845 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8grbb\" (UniqueName: \"kubernetes.io/projected/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-kube-api-access-8grbb\") pod \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\" (UID: \"564ac0d6-aebe-47e0-8f08-9f084f2b05a4\") " Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.457959 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-utilities" (OuterVolumeSpecName: "utilities") pod "564ac0d6-aebe-47e0-8f08-9f084f2b05a4" (UID: "564ac0d6-aebe-47e0-8f08-9f084f2b05a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.459410 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.465080 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-kube-api-access-8grbb" (OuterVolumeSpecName: "kube-api-access-8grbb") pod "564ac0d6-aebe-47e0-8f08-9f084f2b05a4" (UID: "564ac0d6-aebe-47e0-8f08-9f084f2b05a4"). InnerVolumeSpecName "kube-api-access-8grbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.561743 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8grbb\" (UniqueName: \"kubernetes.io/projected/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-kube-api-access-8grbb\") on node \"crc\" DevicePath \"\"" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.562232 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "564ac0d6-aebe-47e0-8f08-9f084f2b05a4" (UID: "564ac0d6-aebe-47e0-8f08-9f084f2b05a4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.663254 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/564ac0d6-aebe-47e0-8f08-9f084f2b05a4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.861628 4685 generic.go:334] "Generic (PLEG): container finished" podID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerID="82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd" exitCode=0 Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.862518 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jdwk" event={"ID":"564ac0d6-aebe-47e0-8f08-9f084f2b05a4","Type":"ContainerDied","Data":"82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd"} Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.862628 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5jdwk" event={"ID":"564ac0d6-aebe-47e0-8f08-9f084f2b05a4","Type":"ContainerDied","Data":"d0ca7c0d44f79ae0cced88b280ed89e5c9dfa2722df606c95c43ee7a3985084d"} Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.862719 4685 scope.go:117] "RemoveContainer" containerID="82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.862919 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5jdwk" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.889359 4685 scope.go:117] "RemoveContainer" containerID="c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.899403 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5jdwk"] Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.913810 4685 scope.go:117] "RemoveContainer" containerID="16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.917200 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5jdwk"] Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.953839 4685 scope.go:117] "RemoveContainer" containerID="82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd" Dec 02 11:04:57 crc kubenswrapper[4685]: E1202 11:04:57.954227 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd\": container with ID starting with 82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd not found: ID does not exist" containerID="82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.954329 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd"} err="failed to get container status \"82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd\": rpc error: code = NotFound desc = could not find container \"82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd\": container with ID starting with 82c05d144764631bf734ffb46c4d72fd8687ebd51a1039a891452bf404c2d9bd not found: ID does not exist" Dec 02 11:04:57 crc 
kubenswrapper[4685]: I1202 11:04:57.954441 4685 scope.go:117] "RemoveContainer" containerID="c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5" Dec 02 11:04:57 crc kubenswrapper[4685]: E1202 11:04:57.954778 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5\": container with ID starting with c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5 not found: ID does not exist" containerID="c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.954806 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5"} err="failed to get container status \"c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5\": rpc error: code = NotFound desc = could not find container \"c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5\": container with ID starting with c26625524e0833bb75751ac901bdf89d4556e21348b8747053bbe64c3b5d98a5 not found: ID does not exist" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.954830 4685 scope.go:117] "RemoveContainer" containerID="16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2" Dec 02 11:04:57 crc kubenswrapper[4685]: E1202 11:04:57.955029 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2\": container with ID starting with 16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2 not found: ID does not exist" containerID="16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2" Dec 02 11:04:57 crc kubenswrapper[4685]: I1202 11:04:57.955061 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2"} err="failed to get container status \"16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2\": rpc error: code = NotFound desc = could not find container \"16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2\": container with ID starting with 16719ed494a0d128b866a6e60dde5753837b20c42573886e30702d21e85bd0b2 not found: ID does not exist" Dec 02 11:04:59 crc kubenswrapper[4685]: I1202 11:04:59.911822 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" path="/var/lib/kubelet/pods/564ac0d6-aebe-47e0-8f08-9f084f2b05a4/volumes" Dec 02 11:05:00 crc kubenswrapper[4685]: I1202 11:05:00.555867 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/util/0.log" Dec 02 11:05:00 crc kubenswrapper[4685]: I1202 11:05:00.847208 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/util/0.log" Dec 02 11:05:01 crc kubenswrapper[4685]: I1202 11:05:01.011506 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/pull/0.log" Dec 02 11:05:01 crc kubenswrapper[4685]: I1202 11:05:01.051477 
4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/pull/0.log" Dec 02 11:05:01 crc kubenswrapper[4685]: I1202 11:05:01.281463 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/util/0.log" Dec 02 11:05:01 crc kubenswrapper[4685]: I1202 11:05:01.334656 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/extract/0.log" Dec 02 11:05:01 crc kubenswrapper[4685]: I1202 11:05:01.335136 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ftf4pq_ae4a80df-9f50-4df0-8376-975da5b85e38/pull/0.log" Dec 02 11:05:01 crc kubenswrapper[4685]: I1202 11:05:01.528139 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/util/0.log" Dec 02 11:05:01 crc kubenswrapper[4685]: I1202 11:05:01.872148 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/pull/0.log" Dec 02 11:05:01 crc kubenswrapper[4685]: I1202 11:05:01.897621 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/util/0.log" Dec 02 11:05:01 crc kubenswrapper[4685]: I1202 11:05:01.982885 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/pull/0.log" Dec 02 11:05:02 crc kubenswrapper[4685]: I1202 11:05:02.207438 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/util/0.log" Dec 02 11:05:02 crc kubenswrapper[4685]: I1202 11:05:02.222972 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/pull/0.log" Dec 02 11:05:02 crc kubenswrapper[4685]: I1202 11:05:02.268428 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83vx2mc_a8daea87-dd6c-4d94-bc95-69ee4a7813a9/extract/0.log" Dec 02 11:05:02 crc kubenswrapper[4685]: I1202 11:05:02.383980 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-utilities/0.log" Dec 02 11:05:02 crc kubenswrapper[4685]: I1202 11:05:02.593938 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-utilities/0.log" Dec 02 11:05:02 crc kubenswrapper[4685]: I1202 11:05:02.600586 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-content/0.log" Dec 02 11:05:02 crc 
kubenswrapper[4685]: I1202 11:05:02.621497 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-content/0.log" Dec 02 11:05:02 crc kubenswrapper[4685]: I1202 11:05:02.808034 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-content/0.log" Dec 02 11:05:02 crc kubenswrapper[4685]: I1202 11:05:02.843425 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/extract-utilities/0.log" Dec 02 11:05:03 crc kubenswrapper[4685]: I1202 11:05:03.131354 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cgv9j_f138872d-e96a-4dcb-8400-c2d5982dd07c/registry-server/0.log" Dec 02 11:05:03 crc kubenswrapper[4685]: I1202 11:05:03.145469 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-utilities/0.log" Dec 02 11:05:03 crc kubenswrapper[4685]: I1202 11:05:03.304574 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-utilities/0.log" Dec 02 11:05:03 crc kubenswrapper[4685]: I1202 11:05:03.315045 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-content/0.log" Dec 02 11:05:03 crc kubenswrapper[4685]: I1202 11:05:03.337437 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-content/0.log" Dec 02 11:05:03 crc kubenswrapper[4685]: I1202 11:05:03.629235 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-content/0.log" Dec 02 11:05:03 crc kubenswrapper[4685]: I1202 11:05:03.637854 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/extract-utilities/0.log" Dec 02 11:05:03 crc kubenswrapper[4685]: I1202 11:05:03.830742 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-f6vtc_01a29fff-fc06-40a0-bf72-a2290597aeaa/registry-server/0.log" Dec 02 11:05:03 crc kubenswrapper[4685]: I1202 11:05:03.901662 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:05:03 crc kubenswrapper[4685]: E1202 11:05:03.901890 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.157256 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-hxbft_267f61a3-d674-4855-9f76-b5c7edb14ed1/marketplace-operator/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 
11:05:04.195244 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-utilities/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.317792 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-content/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.341139 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-utilities/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.422369 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-content/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.632588 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-content/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.645576 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/extract-utilities/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.754825 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-utilities/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.779121 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-m54gz_3bb5a867-9c67-423f-9780-97a59d6cc9da/registry-server/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.949778 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-utilities/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.951578 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-content/0.log" Dec 02 11:05:04 crc kubenswrapper[4685]: I1202 11:05:04.970463 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-content/0.log" Dec 02 11:05:05 crc kubenswrapper[4685]: I1202 11:05:05.146003 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-utilities/0.log" Dec 02 11:05:05 crc kubenswrapper[4685]: I1202 11:05:05.182471 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/extract-content/0.log" Dec 02 11:05:05 crc kubenswrapper[4685]: I1202 11:05:05.582830 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-csktt_5670a146-6fa3-4825-a054-77dc530bf1c4/registry-server/0.log" Dec 02 11:05:15 crc kubenswrapper[4685]: I1202 11:05:15.900520 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:05:15 crc kubenswrapper[4685]: E1202 11:05:15.901170 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:05:30 crc kubenswrapper[4685]: I1202 11:05:30.899958 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:05:30 crc kubenswrapper[4685]: E1202 11:05:30.900766 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.150109 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-czfrf"] Dec 02 11:05:31 crc kubenswrapper[4685]: E1202 11:05:31.150510 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerName="registry-server" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.150526 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerName="registry-server" Dec 02 11:05:31 crc kubenswrapper[4685]: E1202 11:05:31.150544 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerName="extract-utilities" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.150551 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerName="extract-utilities" Dec 02 11:05:31 crc kubenswrapper[4685]: E1202 11:05:31.150606 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerName="extract-content" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.150613 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerName="extract-content" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.150789 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="564ac0d6-aebe-47e0-8f08-9f084f2b05a4" containerName="registry-server" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.152184 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.187052 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-czfrf"] Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.302178 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-catalog-content\") pod \"certified-operators-czfrf\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.302387 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftfxq\" (UniqueName: \"kubernetes.io/projected/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-kube-api-access-ftfxq\") pod \"certified-operators-czfrf\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.302697 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-utilities\") pod \"certified-operators-czfrf\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.404671 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-utilities\") pod \"certified-operators-czfrf\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.404731 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-catalog-content\") pod \"certified-operators-czfrf\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.404777 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftfxq\" (UniqueName: \"kubernetes.io/projected/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-kube-api-access-ftfxq\") pod \"certified-operators-czfrf\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.405415 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-utilities\") pod \"certified-operators-czfrf\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.405638 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-catalog-content\") pod \"certified-operators-czfrf\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.438311 4685 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ftfxq\" (UniqueName: \"kubernetes.io/projected/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-kube-api-access-ftfxq\") pod \"certified-operators-czfrf\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:31 crc kubenswrapper[4685]: I1202 11:05:31.469728 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:32 crc kubenswrapper[4685]: I1202 11:05:32.052791 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-czfrf"] Dec 02 11:05:32 crc kubenswrapper[4685]: I1202 11:05:32.172061 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czfrf" event={"ID":"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c","Type":"ContainerStarted","Data":"db4bc36b04aa6c8a2fbb867f3bf6d02242b6f116b55b8067e3685f34e29b5bf5"} Dec 02 11:05:33 crc kubenswrapper[4685]: I1202 11:05:33.192480 4685 generic.go:334] "Generic (PLEG): container finished" podID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerID="7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790" exitCode=0 Dec 02 11:05:33 crc kubenswrapper[4685]: I1202 11:05:33.192619 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czfrf" event={"ID":"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c","Type":"ContainerDied","Data":"7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790"} Dec 02 11:05:33 crc kubenswrapper[4685]: I1202 11:05:33.194461 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 11:05:34 crc kubenswrapper[4685]: I1202 11:05:34.201578 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czfrf" event={"ID":"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c","Type":"ContainerStarted","Data":"8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b"} Dec 02 11:05:36 crc kubenswrapper[4685]: I1202 11:05:36.227899 4685 generic.go:334] "Generic (PLEG): container finished" podID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerID="8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b" exitCode=0 Dec 02 11:05:36 crc kubenswrapper[4685]: I1202 11:05:36.227959 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czfrf" event={"ID":"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c","Type":"ContainerDied","Data":"8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b"} Dec 02 11:05:37 crc kubenswrapper[4685]: I1202 11:05:37.239726 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czfrf" event={"ID":"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c","Type":"ContainerStarted","Data":"6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad"} Dec 02 11:05:41 crc kubenswrapper[4685]: I1202 11:05:41.470769 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:41 crc kubenswrapper[4685]: I1202 11:05:41.471145 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:42 crc kubenswrapper[4685]: I1202 11:05:42.516719 4685 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-czfrf" 
podUID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerName="registry-server" probeResult="failure" output=< Dec 02 11:05:42 crc kubenswrapper[4685]: timeout: failed to connect service ":50051" within 1s Dec 02 11:05:42 crc kubenswrapper[4685]: > Dec 02 11:05:45 crc kubenswrapper[4685]: I1202 11:05:45.899848 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:05:45 crc kubenswrapper[4685]: E1202 11:05:45.900374 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:05:51 crc kubenswrapper[4685]: I1202 11:05:51.531542 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:51 crc kubenswrapper[4685]: I1202 11:05:51.554942 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-czfrf" podStartSLOduration=17.113109959 podStartE2EDuration="20.554909731s" podCreationTimestamp="2025-12-02 11:05:31 +0000 UTC" firstStartedPulling="2025-12-02 11:05:33.194278316 +0000 UTC m=+3825.566052470" lastFinishedPulling="2025-12-02 11:05:36.636078088 +0000 UTC m=+3829.007852242" observedRunningTime="2025-12-02 11:05:37.258268852 +0000 UTC m=+3829.630043006" watchObservedRunningTime="2025-12-02 11:05:51.554909731 +0000 UTC m=+3843.926683885" Dec 02 11:05:51 crc kubenswrapper[4685]: I1202 11:05:51.613634 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:51 crc kubenswrapper[4685]: I1202 11:05:51.769096 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-czfrf"] Dec 02 11:05:53 crc kubenswrapper[4685]: I1202 11:05:53.421991 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-czfrf" podUID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerName="registry-server" containerID="cri-o://6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad" gracePeriod=2 Dec 02 11:05:53 crc kubenswrapper[4685]: I1202 11:05:53.940641 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.091140 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-utilities\") pod \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.091189 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-catalog-content\") pod \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.091348 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftfxq\" (UniqueName: \"kubernetes.io/projected/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-kube-api-access-ftfxq\") pod \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\" (UID: \"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c\") " Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.091910 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-utilities" (OuterVolumeSpecName: "utilities") pod "eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" (UID: "eedd01f5-dd7d-47ee-8d83-4f2705dbee7c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.100965 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-kube-api-access-ftfxq" (OuterVolumeSpecName: "kube-api-access-ftfxq") pod "eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" (UID: "eedd01f5-dd7d-47ee-8d83-4f2705dbee7c"). InnerVolumeSpecName "kube-api-access-ftfxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.166812 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" (UID: "eedd01f5-dd7d-47ee-8d83-4f2705dbee7c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.193328 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftfxq\" (UniqueName: \"kubernetes.io/projected/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-kube-api-access-ftfxq\") on node \"crc\" DevicePath \"\"" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.193430 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.193444 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.431714 4685 generic.go:334] "Generic (PLEG): container finished" podID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerID="6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad" exitCode=0 Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.431752 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czfrf" event={"ID":"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c","Type":"ContainerDied","Data":"6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad"} Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.431777 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czfrf" event={"ID":"eedd01f5-dd7d-47ee-8d83-4f2705dbee7c","Type":"ContainerDied","Data":"db4bc36b04aa6c8a2fbb867f3bf6d02242b6f116b55b8067e3685f34e29b5bf5"} Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.431788 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-czfrf" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.431795 4685 scope.go:117] "RemoveContainer" containerID="6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.464257 4685 scope.go:117] "RemoveContainer" containerID="8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.468918 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-czfrf"] Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.478833 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-czfrf"] Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.484703 4685 scope.go:117] "RemoveContainer" containerID="7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.527185 4685 scope.go:117] "RemoveContainer" containerID="6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad" Dec 02 11:05:54 crc kubenswrapper[4685]: E1202 11:05:54.527532 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad\": container with ID starting with 6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad not found: ID does not exist" containerID="6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.527668 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad"} err="failed to get container status \"6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad\": rpc error: code = NotFound desc = could not find container \"6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad\": container with ID starting with 6f67aa5d0b385dbd8898f80b54015f77171d3b0745c06c29b6f9863b30b17cad not found: ID does not exist" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.527772 4685 scope.go:117] "RemoveContainer" containerID="8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b" Dec 02 11:05:54 crc kubenswrapper[4685]: E1202 11:05:54.528093 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b\": container with ID starting with 8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b not found: ID does not exist" containerID="8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.528186 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b"} err="failed to get container status \"8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b\": rpc error: code = NotFound desc = could not find container \"8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b\": container with ID starting with 8c97ad2cf0d8624d4086994d72b14adb508bd58bfe3b5da0e793f91af5cc447b not found: ID does not exist" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.528271 4685 scope.go:117] "RemoveContainer" 
containerID="7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790" Dec 02 11:05:54 crc kubenswrapper[4685]: E1202 11:05:54.528700 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790\": container with ID starting with 7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790 not found: ID does not exist" containerID="7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790" Dec 02 11:05:54 crc kubenswrapper[4685]: I1202 11:05:54.528937 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790"} err="failed to get container status \"7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790\": rpc error: code = NotFound desc = could not find container \"7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790\": container with ID starting with 7f24034f34bd74b3da99cfb8fc52c9a299582e445a51ee778a290e736c0bb790 not found: ID does not exist" Dec 02 11:05:55 crc kubenswrapper[4685]: I1202 11:05:55.909810 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" path="/var/lib/kubelet/pods/eedd01f5-dd7d-47ee-8d83-4f2705dbee7c/volumes" Dec 02 11:05:59 crc kubenswrapper[4685]: I1202 11:05:59.900606 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:05:59 crc kubenswrapper[4685]: E1202 11:05:59.901198 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:06:13 crc kubenswrapper[4685]: I1202 11:06:13.904713 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:06:13 crc kubenswrapper[4685]: E1202 11:06:13.905649 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:06:24 crc kubenswrapper[4685]: I1202 11:06:24.900090 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:06:24 crc kubenswrapper[4685]: E1202 11:06:24.901212 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:06:36 crc kubenswrapper[4685]: I1202 11:06:36.900504 4685 scope.go:117] "RemoveContainer" 
containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:06:36 crc kubenswrapper[4685]: E1202 11:06:36.901430 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:06:47 crc kubenswrapper[4685]: I1202 11:06:47.924412 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:06:47 crc kubenswrapper[4685]: E1202 11:06:47.925492 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:06:58 crc kubenswrapper[4685]: I1202 11:06:58.578493 4685 generic.go:334] "Generic (PLEG): container finished" podID="4a595559-df01-40d0-ad8b-e781bad99ce1" containerID="b4cfe9510147f450c77603e18e072c253fe995482a4728b06ae02f6a6defc630" exitCode=0 Dec 02 11:06:58 crc kubenswrapper[4685]: I1202 11:06:58.578582 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-f7twl/must-gather-hds9f" event={"ID":"4a595559-df01-40d0-ad8b-e781bad99ce1","Type":"ContainerDied","Data":"b4cfe9510147f450c77603e18e072c253fe995482a4728b06ae02f6a6defc630"} Dec 02 11:06:58 crc kubenswrapper[4685]: I1202 11:06:58.579393 4685 scope.go:117] "RemoveContainer" containerID="b4cfe9510147f450c77603e18e072c253fe995482a4728b06ae02f6a6defc630" Dec 02 11:06:58 crc kubenswrapper[4685]: I1202 11:06:58.734136 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-f7twl_must-gather-hds9f_4a595559-df01-40d0-ad8b-e781bad99ce1/gather/0.log" Dec 02 11:07:01 crc kubenswrapper[4685]: I1202 11:07:01.900321 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:07:01 crc kubenswrapper[4685]: E1202 11:07:01.902382 4685 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7b6sn_openshift-machine-config-operator(022e694c-9367-4013-8ee9-65ff856e8eec)\"" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" Dec 02 11:07:09 crc kubenswrapper[4685]: I1202 11:07:09.387354 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-f7twl/must-gather-hds9f"] Dec 02 11:07:09 crc kubenswrapper[4685]: I1202 11:07:09.388276 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-f7twl/must-gather-hds9f" podUID="4a595559-df01-40d0-ad8b-e781bad99ce1" containerName="copy" containerID="cri-o://06d54a191e75a1f75ca94dc22023e8087ab425a1b8c367a246ad2a9fe21328cb" gracePeriod=2 Dec 02 11:07:09 crc kubenswrapper[4685]: I1202 11:07:09.402087 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-must-gather-f7twl/must-gather-hds9f"] Dec 02 11:07:09 crc kubenswrapper[4685]: I1202 11:07:09.712417 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-f7twl_must-gather-hds9f_4a595559-df01-40d0-ad8b-e781bad99ce1/copy/0.log" Dec 02 11:07:09 crc kubenswrapper[4685]: I1202 11:07:09.713646 4685 generic.go:334] "Generic (PLEG): container finished" podID="4a595559-df01-40d0-ad8b-e781bad99ce1" containerID="06d54a191e75a1f75ca94dc22023e8087ab425a1b8c367a246ad2a9fe21328cb" exitCode=143 Dec 02 11:07:09 crc kubenswrapper[4685]: I1202 11:07:09.837792 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-f7twl_must-gather-hds9f_4a595559-df01-40d0-ad8b-e781bad99ce1/copy/0.log" Dec 02 11:07:09 crc kubenswrapper[4685]: I1202 11:07:09.838400 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f7twl/must-gather-hds9f" Dec 02 11:07:09 crc kubenswrapper[4685]: I1202 11:07:09.919400 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4a595559-df01-40d0-ad8b-e781bad99ce1-must-gather-output\") pod \"4a595559-df01-40d0-ad8b-e781bad99ce1\" (UID: \"4a595559-df01-40d0-ad8b-e781bad99ce1\") " Dec 02 11:07:09 crc kubenswrapper[4685]: I1202 11:07:09.919469 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2px4g\" (UniqueName: \"kubernetes.io/projected/4a595559-df01-40d0-ad8b-e781bad99ce1-kube-api-access-2px4g\") pod \"4a595559-df01-40d0-ad8b-e781bad99ce1\" (UID: \"4a595559-df01-40d0-ad8b-e781bad99ce1\") " Dec 02 11:07:09 crc kubenswrapper[4685]: I1202 11:07:09.932812 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a595559-df01-40d0-ad8b-e781bad99ce1-kube-api-access-2px4g" (OuterVolumeSpecName: "kube-api-access-2px4g") pod "4a595559-df01-40d0-ad8b-e781bad99ce1" (UID: "4a595559-df01-40d0-ad8b-e781bad99ce1"). InnerVolumeSpecName "kube-api-access-2px4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:07:10 crc kubenswrapper[4685]: I1202 11:07:10.021253 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2px4g\" (UniqueName: \"kubernetes.io/projected/4a595559-df01-40d0-ad8b-e781bad99ce1-kube-api-access-2px4g\") on node \"crc\" DevicePath \"\"" Dec 02 11:07:10 crc kubenswrapper[4685]: I1202 11:07:10.062617 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a595559-df01-40d0-ad8b-e781bad99ce1-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "4a595559-df01-40d0-ad8b-e781bad99ce1" (UID: "4a595559-df01-40d0-ad8b-e781bad99ce1"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:07:10 crc kubenswrapper[4685]: I1202 11:07:10.122410 4685 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4a595559-df01-40d0-ad8b-e781bad99ce1-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 02 11:07:10 crc kubenswrapper[4685]: I1202 11:07:10.728243 4685 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-f7twl_must-gather-hds9f_4a595559-df01-40d0-ad8b-e781bad99ce1/copy/0.log" Dec 02 11:07:10 crc kubenswrapper[4685]: I1202 11:07:10.728770 4685 scope.go:117] "RemoveContainer" containerID="06d54a191e75a1f75ca94dc22023e8087ab425a1b8c367a246ad2a9fe21328cb" Dec 02 11:07:10 crc kubenswrapper[4685]: I1202 11:07:10.730441 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-f7twl/must-gather-hds9f" Dec 02 11:07:10 crc kubenswrapper[4685]: I1202 11:07:10.758714 4685 scope.go:117] "RemoveContainer" containerID="b4cfe9510147f450c77603e18e072c253fe995482a4728b06ae02f6a6defc630" Dec 02 11:07:11 crc kubenswrapper[4685]: I1202 11:07:11.911573 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a595559-df01-40d0-ad8b-e781bad99ce1" path="/var/lib/kubelet/pods/4a595559-df01-40d0-ad8b-e781bad99ce1/volumes" Dec 02 11:07:13 crc kubenswrapper[4685]: I1202 11:07:13.901698 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:07:14 crc kubenswrapper[4685]: I1202 11:07:14.782073 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"a8ccdb3221c4d03c6aa6dc149a5dcfd339e511c17ccef6136ae26006e7ada36e"} Dec 02 11:08:05 crc kubenswrapper[4685]: I1202 11:08:05.165878 4685 scope.go:117] "RemoveContainer" containerID="48ebd2910ee69667f19a9bebb9aa075289c604cb535f11638290b840d2b5824f" Dec 02 11:09:42 crc kubenswrapper[4685]: I1202 11:09:42.148104 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 11:09:42 crc kubenswrapper[4685]: I1202 11:09:42.148753 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 11:10:12 crc kubenswrapper[4685]: I1202 11:10:12.147378 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 11:10:12 crc kubenswrapper[4685]: I1202 11:10:12.147958 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 
02 11:10:42 crc kubenswrapper[4685]: I1202 11:10:42.147247 4685 patch_prober.go:28] interesting pod/machine-config-daemon-7b6sn container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 02 11:10:42 crc kubenswrapper[4685]: I1202 11:10:42.147789 4685 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 02 11:10:42 crc kubenswrapper[4685]: I1202 11:10:42.147847 4685 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" Dec 02 11:10:42 crc kubenswrapper[4685]: I1202 11:10:42.148680 4685 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a8ccdb3221c4d03c6aa6dc149a5dcfd339e511c17ccef6136ae26006e7ada36e"} pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 02 11:10:42 crc kubenswrapper[4685]: I1202 11:10:42.148741 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" podUID="022e694c-9367-4013-8ee9-65ff856e8eec" containerName="machine-config-daemon" containerID="cri-o://a8ccdb3221c4d03c6aa6dc149a5dcfd339e511c17ccef6136ae26006e7ada36e" gracePeriod=600 Dec 02 11:10:42 crc kubenswrapper[4685]: I1202 11:10:42.847668 4685 generic.go:334] "Generic (PLEG): container finished" podID="022e694c-9367-4013-8ee9-65ff856e8eec" containerID="a8ccdb3221c4d03c6aa6dc149a5dcfd339e511c17ccef6136ae26006e7ada36e" exitCode=0 Dec 02 11:10:42 crc kubenswrapper[4685]: I1202 11:10:42.847798 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerDied","Data":"a8ccdb3221c4d03c6aa6dc149a5dcfd339e511c17ccef6136ae26006e7ada36e"} Dec 02 11:10:42 crc kubenswrapper[4685]: I1202 11:10:42.848013 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7b6sn" event={"ID":"022e694c-9367-4013-8ee9-65ff856e8eec","Type":"ContainerStarted","Data":"884208381b3302e786aba8fbaf9aeb5fc8840cfcc2371d6b2da952b387643efd"} Dec 02 11:10:42 crc kubenswrapper[4685]: I1202 11:10:42.848053 4685 scope.go:117] "RemoveContainer" containerID="3771139f6a8d8cd3c8dbeef3811de2153731f10f2e63fdae5a6fa3ce26ccaece" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.902921 4685 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xhxmc"] Dec 02 11:11:26 crc kubenswrapper[4685]: E1202 11:11:26.905827 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a595559-df01-40d0-ad8b-e781bad99ce1" containerName="copy" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.905949 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a595559-df01-40d0-ad8b-e781bad99ce1" containerName="copy" Dec 02 11:11:26 crc kubenswrapper[4685]: E1202 11:11:26.906050 4685 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="4a595559-df01-40d0-ad8b-e781bad99ce1" containerName="gather" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.906131 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a595559-df01-40d0-ad8b-e781bad99ce1" containerName="gather" Dec 02 11:11:26 crc kubenswrapper[4685]: E1202 11:11:26.906220 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerName="extract-utilities" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.906310 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerName="extract-utilities" Dec 02 11:11:26 crc kubenswrapper[4685]: E1202 11:11:26.906430 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerName="registry-server" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.906512 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerName="registry-server" Dec 02 11:11:26 crc kubenswrapper[4685]: E1202 11:11:26.906982 4685 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerName="extract-content" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.907076 4685 state_mem.go:107] "Deleted CPUSet assignment" podUID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerName="extract-content" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.907433 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="eedd01f5-dd7d-47ee-8d83-4f2705dbee7c" containerName="registry-server" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.907579 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a595559-df01-40d0-ad8b-e781bad99ce1" containerName="gather" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.907691 4685 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a595559-df01-40d0-ad8b-e781bad99ce1" containerName="copy" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.911018 4685 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:26 crc kubenswrapper[4685]: I1202 11:11:26.945648 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhxmc"] Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.051962 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-utilities\") pod \"redhat-marketplace-xhxmc\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.052094 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-catalog-content\") pod \"redhat-marketplace-xhxmc\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.052459 4685 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8cxh\" (UniqueName: \"kubernetes.io/projected/e7de249b-7d34-4b25-aa66-3cd30137753e-kube-api-access-d8cxh\") pod \"redhat-marketplace-xhxmc\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.154610 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8cxh\" (UniqueName: \"kubernetes.io/projected/e7de249b-7d34-4b25-aa66-3cd30137753e-kube-api-access-d8cxh\") pod \"redhat-marketplace-xhxmc\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.154679 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-utilities\") pod \"redhat-marketplace-xhxmc\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.154722 4685 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-catalog-content\") pod \"redhat-marketplace-xhxmc\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.155300 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-catalog-content\") pod \"redhat-marketplace-xhxmc\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.155316 4685 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-utilities\") pod \"redhat-marketplace-xhxmc\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.174410 4685 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d8cxh\" (UniqueName: \"kubernetes.io/projected/e7de249b-7d34-4b25-aa66-3cd30137753e-kube-api-access-d8cxh\") pod \"redhat-marketplace-xhxmc\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.246919 4685 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:27 crc kubenswrapper[4685]: I1202 11:11:27.739979 4685 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhxmc"] Dec 02 11:11:27 crc kubenswrapper[4685]: W1202 11:11:27.744911 4685 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7de249b_7d34_4b25_aa66_3cd30137753e.slice/crio-ea7eab099a068457b157d27103dab5e99e6ee08c22f38e56924db857b50ba1aa WatchSource:0}: Error finding container ea7eab099a068457b157d27103dab5e99e6ee08c22f38e56924db857b50ba1aa: Status 404 returned error can't find the container with id ea7eab099a068457b157d27103dab5e99e6ee08c22f38e56924db857b50ba1aa Dec 02 11:11:28 crc kubenswrapper[4685]: I1202 11:11:28.349717 4685 generic.go:334] "Generic (PLEG): container finished" podID="e7de249b-7d34-4b25-aa66-3cd30137753e" containerID="b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f" exitCode=0 Dec 02 11:11:28 crc kubenswrapper[4685]: I1202 11:11:28.349811 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhxmc" event={"ID":"e7de249b-7d34-4b25-aa66-3cd30137753e","Type":"ContainerDied","Data":"b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f"} Dec 02 11:11:28 crc kubenswrapper[4685]: I1202 11:11:28.349842 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhxmc" event={"ID":"e7de249b-7d34-4b25-aa66-3cd30137753e","Type":"ContainerStarted","Data":"ea7eab099a068457b157d27103dab5e99e6ee08c22f38e56924db857b50ba1aa"} Dec 02 11:11:28 crc kubenswrapper[4685]: I1202 11:11:28.354060 4685 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 02 11:11:30 crc kubenswrapper[4685]: I1202 11:11:30.377882 4685 generic.go:334] "Generic (PLEG): container finished" podID="e7de249b-7d34-4b25-aa66-3cd30137753e" containerID="240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30" exitCode=0 Dec 02 11:11:30 crc kubenswrapper[4685]: I1202 11:11:30.378072 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhxmc" event={"ID":"e7de249b-7d34-4b25-aa66-3cd30137753e","Type":"ContainerDied","Data":"240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30"} Dec 02 11:11:31 crc kubenswrapper[4685]: I1202 11:11:31.389474 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhxmc" event={"ID":"e7de249b-7d34-4b25-aa66-3cd30137753e","Type":"ContainerStarted","Data":"03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566"} Dec 02 11:11:31 crc kubenswrapper[4685]: I1202 11:11:31.412460 4685 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xhxmc" podStartSLOduration=2.686041188 podStartE2EDuration="5.41244019s" podCreationTimestamp="2025-12-02 11:11:26 +0000 UTC" firstStartedPulling="2025-12-02 11:11:28.353607949 +0000 UTC m=+4180.725382133" 
lastFinishedPulling="2025-12-02 11:11:31.080006981 +0000 UTC m=+4183.451781135" observedRunningTime="2025-12-02 11:11:31.407247228 +0000 UTC m=+4183.779021392" watchObservedRunningTime="2025-12-02 11:11:31.41244019 +0000 UTC m=+4183.784214344" Dec 02 11:11:37 crc kubenswrapper[4685]: I1202 11:11:37.247181 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:37 crc kubenswrapper[4685]: I1202 11:11:37.247664 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:37 crc kubenswrapper[4685]: I1202 11:11:37.310615 4685 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:37 crc kubenswrapper[4685]: I1202 11:11:37.498822 4685 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:37 crc kubenswrapper[4685]: I1202 11:11:37.561748 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhxmc"] Dec 02 11:11:39 crc kubenswrapper[4685]: I1202 11:11:39.463433 4685 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xhxmc" podUID="e7de249b-7d34-4b25-aa66-3cd30137753e" containerName="registry-server" containerID="cri-o://03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566" gracePeriod=2 Dec 02 11:11:39 crc kubenswrapper[4685]: I1202 11:11:39.940474 4685 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.016021 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-utilities\") pod \"e7de249b-7d34-4b25-aa66-3cd30137753e\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.016209 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-catalog-content\") pod \"e7de249b-7d34-4b25-aa66-3cd30137753e\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.016276 4685 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8cxh\" (UniqueName: \"kubernetes.io/projected/e7de249b-7d34-4b25-aa66-3cd30137753e-kube-api-access-d8cxh\") pod \"e7de249b-7d34-4b25-aa66-3cd30137753e\" (UID: \"e7de249b-7d34-4b25-aa66-3cd30137753e\") " Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.017410 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-utilities" (OuterVolumeSpecName: "utilities") pod "e7de249b-7d34-4b25-aa66-3cd30137753e" (UID: "e7de249b-7d34-4b25-aa66-3cd30137753e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.025573 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7de249b-7d34-4b25-aa66-3cd30137753e-kube-api-access-d8cxh" (OuterVolumeSpecName: "kube-api-access-d8cxh") pod "e7de249b-7d34-4b25-aa66-3cd30137753e" (UID: "e7de249b-7d34-4b25-aa66-3cd30137753e"). InnerVolumeSpecName "kube-api-access-d8cxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.041250 4685 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e7de249b-7d34-4b25-aa66-3cd30137753e" (UID: "e7de249b-7d34-4b25-aa66-3cd30137753e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.119030 4685 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-utilities\") on node \"crc\" DevicePath \"\"" Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.119074 4685 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7de249b-7d34-4b25-aa66-3cd30137753e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.119104 4685 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8cxh\" (UniqueName: \"kubernetes.io/projected/e7de249b-7d34-4b25-aa66-3cd30137753e-kube-api-access-d8cxh\") on node \"crc\" DevicePath \"\"" Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.471106 4685 generic.go:334] "Generic (PLEG): container finished" podID="e7de249b-7d34-4b25-aa66-3cd30137753e" containerID="03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566" exitCode=0 Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.471146 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhxmc" event={"ID":"e7de249b-7d34-4b25-aa66-3cd30137753e","Type":"ContainerDied","Data":"03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566"} Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.471169 4685 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xhxmc" event={"ID":"e7de249b-7d34-4b25-aa66-3cd30137753e","Type":"ContainerDied","Data":"ea7eab099a068457b157d27103dab5e99e6ee08c22f38e56924db857b50ba1aa"} Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.471184 4685 scope.go:117] "RemoveContainer" containerID="03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566" Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.471297 4685 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xhxmc" Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.496676 4685 scope.go:117] "RemoveContainer" containerID="240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30" Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.515876 4685 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhxmc"] Dec 02 11:11:40 crc kubenswrapper[4685]: I1202 11:11:40.525970 4685 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xhxmc"] Dec 02 11:11:41 crc kubenswrapper[4685]: I1202 11:11:41.117375 4685 scope.go:117] "RemoveContainer" containerID="b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f" Dec 02 11:11:41 crc kubenswrapper[4685]: I1202 11:11:41.158534 4685 scope.go:117] "RemoveContainer" containerID="03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566" Dec 02 11:11:41 crc kubenswrapper[4685]: E1202 11:11:41.159130 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566\": container with ID starting with 03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566 not found: ID does not exist" containerID="03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566" Dec 02 11:11:41 crc kubenswrapper[4685]: I1202 11:11:41.159200 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566"} err="failed to get container status \"03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566\": rpc error: code = NotFound desc = could not find container \"03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566\": container with ID starting with 03755149bfbcdabf780f9d00df62812198b99e8e7d7a3e2b526b2694cce5f566 not found: ID does not exist" Dec 02 11:11:41 crc kubenswrapper[4685]: I1202 11:11:41.159231 4685 scope.go:117] "RemoveContainer" containerID="240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30" Dec 02 11:11:41 crc kubenswrapper[4685]: E1202 11:11:41.159738 4685 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30\": container with ID starting with 240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30 not found: ID does not exist" containerID="240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30" Dec 02 11:11:41 crc kubenswrapper[4685]: I1202 11:11:41.159783 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30"} err="failed to get container status \"240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30\": rpc error: code = NotFound desc = could not find container \"240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30\": container with ID starting with 240abed0230e525f7c6d9408086f1b8f22471e49d15f540270a146f69c878a30 not found: ID does not exist" Dec 02 11:11:41 crc kubenswrapper[4685]: I1202 11:11:41.159814 4685 scope.go:117] "RemoveContainer" containerID="b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f" Dec 02 11:11:41 crc kubenswrapper[4685]: E1202 11:11:41.160205 4685 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f\": container with ID starting with b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f not found: ID does not exist" containerID="b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f" Dec 02 11:11:41 crc kubenswrapper[4685]: I1202 11:11:41.160267 4685 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f"} err="failed to get container status \"b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f\": rpc error: code = NotFound desc = could not find container \"b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f\": container with ID starting with b65b747674c4310142f502b874f3f2488945f564f8c957dcb313785f851f318f not found: ID does not exist" Dec 02 11:11:41 crc kubenswrapper[4685]: I1202 11:11:41.910446 4685 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7de249b-7d34-4b25-aa66-3cd30137753e" path="/var/lib/kubelet/pods/e7de249b-7d34-4b25-aa66-3cd30137753e/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515113544566024457 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015113544567017375 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015113534033016503 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015113534034015454 5ustar corecore